diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 000000000..db8c40a50 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1 @@ +custom: https://zeronet.io/docs/help_zeronet/donate/ diff --git a/.github/ISSUE_TEMPLATE/bug-report.md b/.github/ISSUE_TEMPLATE/bug-report.md new file mode 100644 index 000000000..b97ad5563 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug-report.md @@ -0,0 +1,33 @@ +--- +name: Bug report +about: Create a report to help us improve ZeroNet +title: '' +labels: '' +assignees: '' + +--- + +### Step 1: Please describe your environment + + * ZeroNet version: _____ + * Operating system: _____ + * Web browser: _____ + * Tor status: not available/always/disabled + * Opened port: yes/no + * Special configuration: ____ + +### Step 2: Describe the problem: + +#### Steps to reproduce: + + 1. _____ + 2. _____ + 3. _____ + +#### Observed Results: + + * What happened? This could be a screenshot, a description, log output (you can send log/debug.log file to hello@zeronet.io if necessary), etc. + +#### Expected Results: + + * What did you expect to happen? diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000..fe7c81781 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for ZeroNet +title: '' +labels: '' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. + +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. 
diff --git a/.gitignore b/.gitignore index 26e671fda..057c422a6 100644 --- a/.gitignore +++ b/.gitignore @@ -18,10 +18,11 @@ data/* # Virtualenv env/* -# Tor, downloaded automatically +# Tor data tools/tor/data -tools/tor/*exe -tools/tor/*dll # PhantomJS, downloaded manually for unit tests -tools/phantomjs \ No newline at end of file +tools/phantomjs + +# ZeroNet config file +zeronet.conf diff --git a/.travis.yml b/.travis.yml index d570e593d..9f214a3fa 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,21 +1,39 @@ language: python python: - 2.7 +services: + - docker install: - pip install -U pip wheel - pip install -r requirements.txt + - pip list before_script: - openssl version -a + # Add an IPv6 config - see the corresponding Travis issue + # https://github.com/travis-ci/travis-ci/issues/8361 + - if [ "${TRAVIS_OS_NAME}" == "linux" ]; then + sudo sh -c 'echo 0 > /proc/sys/net/ipv6/conf/all/disable_ipv6'; + fi script: - - python -m pytest plugins/CryptMessage/Test + - python -m pytest -x plugins/CryptMessage/Test + - python -m pytest -x plugins/Bigfile/Test + - python -m pytest -x plugins/AnnounceLocal/Test + - python -m pytest -x plugins/OptionalManager/Test - python -m pytest src/Test --cov=src --cov-config src/Test/coverage.ini before_install: - - pip install -U pytest mock pytest-cov + - pip install -U pytest mock pytest-cov selenium - pip install codecov - pip install coveralls + - docker build -t zeronet . 
+ - docker run -d -v $PWD:/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 zeronet after_success: - codecov - coveralls --rcfile=src/Test/coverage.ini cache: directories: - - $HOME/.cache/pip \ No newline at end of file + - $HOME/.cache/pip +notifications: + email: + recipients: + hello@zeronet.io + on_success: change diff --git a/CHANGELOG-zh-cn.md b/CHANGELOG-zh-cn.md new file mode 100644 index 000000000..c09ca4010 --- /dev/null +++ b/CHANGELOG-zh-cn.md @@ -0,0 +1,134 @@ +## ZeroNet 0.5.1 (2016-11-18) +### 新增 +- 多语言界面 +- 新插件:为站点 HTML 与 JS 文件提供的翻译助手 +- 每个站点独立的 favicon + +### 修复 +- 并行可选文件下载 + +## ZeroNet 0.5.0 (2016-11-08) +### 新增 +- 新插件:允许在 ZeroHello 列出/删除/固定/管理文件 +- 新的 API 命令来关注用户的可选文件,与可选文件的请求统计 +- 新的可选文件总大小限制 +- 新插件:保存节点到数据库并在重启时保持它们,使得更快的可选文件搜索以及在没有 Tracker 的情况下工作 +- 重写 UPnP 端口打开器 + 退出时关闭端口(感谢 sirMackk!) +- 通过懒惰 PeerHashfield 创建来减少内存占用 +- 在 /Stats 页面加载 JSON 文件统计与数据库信息 + +### 更改 +- 独立的锁定文件来获得更好的 Windows 兼容性 +- 当执行 start.py 时,即使 ZeroNet 已经运行也打开浏览器 +- 在重载时保持插件顺序来允许插件扩展另一个插件 +- 只在完整加载 sites.json 时保存来避免数据丢失 +- 将更多的 Tracker 更改为更可靠的 Tracker +- 更少的 findhashid CPU 使用率 +- 合并下载大量可选文件 +- 更多对于可选文件的其他优化 +- 如果一个站点有 1000 个节点,更积极地清理 +- 为验证错误使用警告而不是错误 +- 首先推送更新到更新的客户端 +- 损坏文件重置改进 + +### 修复 +- 修复启动时出现的站点删除错误 +- 延迟 WebSocket 消息直到连接上 +- 修复如果文件包含额外数据时的数据库导入 +- 修复大站点下载 +- 修复 diff 发送 bug (跟踪它好长时间了) +- 修复当 JSON 文件包含 [] 字符时随机出现的发布错误 +- 修复 siteDelete 与 siteCreate bug +- 修复文件写入确认对话框 + + +## ZeroNet 0.4.1 (2016-09-05) +### 新增 +- 更快启动与更少内存使用的内核改变 +- 尝试连接丢失时重新连接 Tor +- 侧边栏滑入 +- 尝试避免不完整的数据文件被覆盖 +- 更快地打开数据库 +- 在侧边栏显示用户文件大小 +- 依赖 --connection_limit 的并发 worker 数量 + + +### 更改 +- 在空闲 5 分钟后关闭数据库 +- 更好的站点大小计算 +- 允许在域名中使用“-”符号 +- 总是尝试为站点保持连接 +- 移除已合并站点的合并权限 +- 只扫描最后 3 天的新闻源来加快数据库请求 +- 更新 ZeroBundle-win 到 Python 2.7.12 + + +### 修复 +- 修复重要的安全问题:允许任意用户无需有效的来自 ID 提供者的证书发布新内容,感谢 Kaffie 指出 +- 修复在没有选择提供证书提供者时的侧边栏错误 +- 在数据库重建时跳过无效文件 +- 修复随机弹出的 WebSocket 连接错误 +- 修复新的 siteCreate 命令 +- 修复站点大小计算 +- 修复计算机唤醒后的端口打开检查 +- 修复 --size_limit 的命令行解析 + + +## ZeroNet 0.4.0 (2016-08-11) +### 新增 +- 合并站点插件 +- Live source 
code reloading: Faster core development by allowing me to make changes in ZeroNet source code without restarting it. +- 为合并站点设计的新 JSON 表 +- 从侧边栏重建数据库 +- 允许直接在 JSON 表中存储自定义数据:更简单与快速的 SQL 查询 +- 用户文件存档:允许站点拥有者存档不活跃的用户内容到单个文件(减少初始同步的时间/CPU/内存使用率) +- 在文件删除时同时触发数据库 onUpdated/update +- 从 ZeroFrame API 请求权限 +- 允许使用 fileWrite API 命令在 content.json 存储额外数据 +- 更快的可选文件下载 +- 使用替代源 (Gogs, Gitlab) 来下载更新 +- Track provided sites/connection and prefer to keep the ones with more sites to reduce connection number + +### 更改 +- 保持每个站点至少 5 个连接 +- 将目标站点连接从 10 更改到 15 +- ZeroHello 搜索功能稳定性/速度改进 +- 提升机械硬盘下的客户端性能 + +### 修复 +- 修复 IE11 wrapper nonce 错误 +- 修复在移动设备上的侧边栏 +- 修复站点大小计算 +- 修复 IE10 兼容性 +- Windows XP ZeroBundle 兼容性(感谢中国人民) + + +## ZeroNet 0.3.7 (2016-05-27) +### 更改 +- 通过只传输补丁来减少带宽使用 +- 其他 CPU /内存优化 + + +## ZeroNet 0.3.6 (2016-05-27) +### 新增 +- 新的 ZeroHello +- Newsfeed 函数 + +### 修复 +- 安全性修复 + + +## ZeroNet 0.3.5 (2016-02-02) +### 新增 +- 带有 .onion 隐藏服务的完整 Tor 支持 +- 使用 ZeroNet 协议的 Bootstrap + +### 修复 +- 修复 Gevent 1.0.2 兼容性 + + +## ZeroNet 0.3.4 (2015-12-28) +### 新增 +- AES, ECIES API 函数支持 +- PushState 与 ReplaceState URL 通过 API 的操作支持 +- 多用户 localstorage diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 000000000..225e424ab --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,451 @@ +## ZeroNet 0.6.4 (2018-10-20) Rev3660 +### Added + - New plugin: UiConfig. A web interface that allows changing ZeroNet settings. + - New plugin: AnnounceShare. Share trackers between users, automatically announce client's ip as tracker if Bootstrapper plugin is enabled. + - Global tracker stats on ZeroHello: Include statistics from all served sites instead of displaying request statistics only for one site. + - Support custom proxy for trackers. (Configurable with /Config) + - Adding peers to sites manually using zeronet_peers get parameter + - Copy site address with peers link on the sidebar. + - Zip file listing and streaming support for Bigfiles. 
+ - Tracker statistics on /Stats page + - Peer reputation save/restore to speed up sync time after startup. + - Full support fileGet, fileList, dirList calls on tar.gz/zip files. + - Archived_before support to user content rules to allow deletion of all user files before the specified date + - Show and manage "Connecting" sites on ZeroHello + - Add theme support to ZeroNet sites + - Dark theme for ZeroHello, ZeroBlog, ZeroTalk + +### Changed + - Dynamic big file allocation: More efficient storage usage by don't pre-allocate the whole file at the beginning, but expand the size as the content downloads. + - Reduce the request frequency to unreliable trackers. + - Only allow 5 concurrent checkSites to run in parallel to reduce load under Tor/slow connection. + - Stop site downloading if it reached 95% of site limit to avoid download loop for sites out of limit + - The pinned optional files won't be removed from download queue after 30 retries and won't be deleted even if the site owner removes it. + - Don't remove incomplete (downloading) sites on startup + - Remove --pin_bigfile argument as big files are automatically excluded from optional files limit. + +### Fixed + - Trayicon compatibility with latest gevent + - Request number counting for zero:// trackers + - Peer reputation boost for zero:// trackers. + - Blocklist of peers loaded from peerdb (Thanks tangdou1 for report) + - Sidebar map loading on foreign languages (Thx tangdou1 for report) + - FileGet on non-existent files (Thanks mcdev for reporting) + - Peer connecting bug for sites with low amount of peers + +#### "The Vacation" Sandbox escape bug [Reported by GitCenter / Krixano / ZeroLSTN] + +In ZeroNet 0.6.3 Rev3615 and earlier as a result of invalid file type detection, a malicious site could escape the iframe sandbox. + +Result: Browser iframe sandbox escape + +Applied fix: Replaced the previous, file extension based file type identification with a proper one. 
+ +Affected versions: All versions before ZeroNet Rev3616 + + +## ZeroNet 0.6.3 (2018-06-26) +### Added + - New plugin: ContentFilter that allows to have shared site and user block list. + - Support Tor meek proxies to avoid tracker blocking of GFW + - Detect network level tracker blocking and easy setting meek proxy for tracker connections. + - Support downloading 2GB+ sites as .zip (Thx to Radtoo) + - Support ZeroNet as a transparent proxy (Thx to JeremyRand) + - Allow fileQuery as CORS command (Thx to imachug) + - Windows distribution includes Tor and meek client by default + - Download sites as zip link to sidebar + - File server port randomization + - Implicit SSL for all connection + - fileList API command for zip files + - Auto download bigfiles size limit on sidebar + - Local peer number to the sidebar + - Open site directory button in sidebar + +### Changed + - Switched to Azure Tor meek proxy as Amazon one became unavailable + - Refactored/rewritten tracker connection manager + - Improved peer discovery for optional files without opened port + - Also delete Bigfile's piecemap on deletion + +### Fixed + - Important security issue: Iframe sandbox escape [Reported by Ivanq / gitcenter] + - Local peer discovery when running multiple clients on the same machine + - Uploading small files with Bigfile plugin + - Ctrl-c shutdown when running CLI commands + - High CPU/IO usage when Multiuser plugin enabled + - Firefox back button + - Peer discovery on older Linux kernels + - Optional file handling when multiple files have the same hash_id (first 4 chars of the hash) + - Msgpack 0.5.5 and 0.5.6 compatibility + +## ZeroNet 0.6.2 (2018-02-18) + +### Added + - New plugin: AnnounceLocal to make ZeroNet work without an internet connection on the local network. 
+ - Allow dbQuey and userGetSettings using the `as` API command on different sites with Cors permission + - New config option: `--log_level` to reduce log verbosity and IO load + - Prefer to connect to recent peers from trackers first + - Mark peers with port 1 is also unconnectable for future fix for trackers that do not support port 0 announce + +### Changed + - Don't keep connection for sites that have not been modified in the last week + - Change unreliable trackers to new ones + - Send maximum 10 findhash request in one find optional files round (15sec) + - Change "Unique to site" to "No certificate" for default option in cert selection dialog. + - Dont print warnings if not in debug mode + - Generalized tracker logging format + - Only recover sites from sites.json if they had peers + - Message from local peers does not means internet connection + - Removed `--debug_gevent` and turned on Gevent block logging by default + +### Fixed + - Limit connections to 512 to avoid reaching 1024 limit on windows + - Exception when logging foreign operating system socket errors + - Don't send private (local) IPs on pex + - Don't connect to private IPs in tor always mode + - Properly recover data from msgpack unpacker on file stream start + - Symlinked data directory deletion when deleting site using Windows + - De-duplicate peers before publishing + - Bigfile info for non-existing files + + +## ZeroNet 0.6.1 (2018-01-25) + +### Added + - New plugin: Chart + - Collect and display charts about your contribution to ZeroNet network + - Allow list as argument replacement in sql queries. 
(Thanks to imachug) + - Newsfeed query time statistics (Click on "From XX sites in X.Xs on ZeroHello) + - New UiWebsocket API command: As to run commands as other site + - Ranged ajax queries for big files + - Filter feed by type and site address + - FileNeed, Bigfile upload command compatibility with merger sites + - Send event on port open / tor status change + - More description on permission request + +### Changed + - Reduce memory usage of sidebar geoip database cache + - Change unreliable tracker to new one + - Don't display Cors permission ask if it already granted + - Avoid UI blocking when rebuilding a merger site + - Skip listing ignored directories on signing + - In Multiuser mode show the seed welcome message when adding new certificate instead of first visit + - Faster async port opening on multiple network interfaces + - Allow javascript modals + - Only zoom sidebar globe if mouse button is pressed down + +### Fixed + - Open port checking error reporting (Thanks to imachug) + - Out-of-range big file requests + - Don't output errors happened on gevent greenlets twice + - Newsfeed skip sites with no database + - Newsfeed queries with multiple params + - Newsfeed queries with UNION and UNION ALL + - Fix site clone with sites larger that 10MB + - Unreliable Websocket connection when requesting files from different sites at the same time + + +## ZeroNet 0.6.0 (2017-10-17) + +### Added + - New plugin: Big file support + - Automatic pinning on Big file download + - Enable TCP_NODELAY for supporting sockets + - actionOptionalFileList API command arguments to list non-downloaded files or only big files + - serverShowdirectory API command arguments to allow to display site's directory in OS file browser + - fileNeed API command to initialize optional file downloading + - wrapperGetAjaxKey API command to request nonce for AJAX request + - Json.gz support for database files + - P2P port checking (Thanks for grez911) + - `--download_optional auto` argument to 
enable automatic optional file downloading for newly added site + - Statistics for big files and protocol command requests on /Stats + - Allow to set user limitation based on auth_address + +### Changed + - More aggressive and frequent connection timeout checking + - Use out of msgpack context file streaming for files larger than 512KB + - Allow optional files workers over the worker limit + - Automatic redirection to wrapper on nonce_error + - Send websocket event on optional file deletion + - Optimize sites.json saving + - Enable faster C-based msgpack packer by default + - Major optimization on Bootstrapper plugin SQL queries + - Don't reset bad file counter on restart, to allow easier give up on unreachable files + - Incoming connection limit changed from 1000 to 500 to avoid reaching socket limit on Windows + - Changed tracker boot.zeronet.io domain, because zeronet.io got banned in some countries + +#### Fixed + - Sub-directories in user directories + +## ZeroNet 0.5.7 (2017-07-19) +### Added + - New plugin: CORS to request read permission to other site's content + - New API command: userSetSettings/userGetSettings to store site's settings in users.json + - Avoid file download if the file size does not match with the requested one + - JavaScript and wrapper less file access using /raw/ prefix ([Example](http://127.0.0.1:43110/raw/1AsRLpuRxr3pb9p3TKoMXPSWHzh6i7fMGi/en.tar.gz/index.html)) + - --silent command line option to disable logging to stdout + + +### Changed + - Better error reporting on sign/verification errors + - More test for sign and verification process + - Update to OpenSSL v1.0.2l + - Limit compressed files to 6MB to avoid zip/tar.gz bomb + - Allow space, [], () characters in filenames + - Disable cross-site resource loading to improve privacy. [Reported by Beardog108] + - Download directly accessed Pdf/Svg/Swf files instead of displaying them to avoid wrapper escape using in JS in SVG file. 
[Reported by Beardog108] + - Disallow potentially unsafe regular expressions to avoid ReDoS [Reported by MuxZeroNet] + +### Fixed + - Detecting data directory when running Windows distribution exe [Reported by Plasmmer] + - OpenSSL loading under Android 6+ + - Error on exiting when no connection server started + + +## ZeroNet 0.5.6 (2017-06-15) +### Added + - Callback for certSelect API command + - More compact list formatting in json + +### Changed + - Remove obsolete auth_key_sha512 and signature format + - Improved Spanish translation (Thanks to Pupiloho) + +### Fixed + - Opened port checking (Thanks l5h5t7 & saber28 for reporting) + - Standalone update.py argument parsing (Thanks Zalex for reporting) + - uPnP crash on startup (Thanks Vertux for reporting) + - CoffeeScript 1.12.6 compatibility (Thanks kavamaken & imachug) + - Multi value argument parsing + - Database error when running from directory that contains special characters (Thanks Pupiloho for reporting) + - Site lock violation logging + + +#### Proxy bypass during source upgrade [Reported by ZeroMux] + +In ZeroNet before 0.5.6 during the client's built-in source code upgrade mechanism, +ZeroNet did not respect Tor and/or proxy settings. + +Result: ZeroNet downloaded the update without using the Tor network and potentially leaked the connections. + +Fix: Removed the problematic code line from the updater that removed the proxy settings from the socket library. + +Affected versions: ZeroNet 0.5.5 and earlier, Fixed in: ZeroNet 0.5.6 + + +#### XSS vulnerability using DNS rebinding. [Reported by Beardog108] + +In ZeroNet before 0.5.6 the web interface did not validate the request's Host parameter. + +Result: An attacker using a specially crafted DNS entry could have bypassed the browser's cross-site-scripting protection +and potentially gained access to user's private data stored on site. + +Fix: By default ZeroNet only accept connections from 127.0.0.1 and localhost hosts. 
+If you bind the ui server to an external interface, then it also adds the first http request's host to the allowed host list +or you can define it manually using --ui_host. + +Affected versions: ZeroNet 0.5.5 and earlier, Fixed in: ZeroNet 0.5.6 + + +## ZeroNet 0.5.5 (2017-05-18) +### Added +- Outgoing socket binding by --bind parameter +- Database rebuilding progress bar +- Protect low traffic site's peers from cleanup closing +- Local site blacklisting +- Cloned site source code upgrade from parent +- Input placeholder support for displayPrompt +- Alternative interaction for wrapperConfirm + +### Changed +- New file priorities for faster site display on first visit +- Don't add ? to url if push/replaceState url starts with # + +### Fixed +- PermissionAdd/Remove admin command requirement +- Multi-line confirmation dialog + + +## ZeroNet 0.5.4 (2017-04-14) +### Added +- Major speed and CPU usage enhancements in Tor always mode +- Send skipped modifications to outdated clients + +### Changed +- Upgrade libs to latest version +- Faster port opening and closing +- Deny site limit modification in MultiUser mode + +### Fixed +- Filling database from optional files +- OpenSSL detection on systems with OpenSSL 1.1 +- Users.json corruption on systems with slow hdd +- Fix leaking files in data directory by webui + + +## ZeroNet 0.5.3 (2017-02-27) +### Added +- Tar.gz/zip packed site support +- Utf8 filenames in archive files +- Experimental --db_mode secure database mode to prevent data loss on systems with unreliable power source. 
+- Admin user support in MultiUser mode +- Optional deny adding new sites in MultiUser mode + +### Changed +- Faster update and publish times by new socket sharing algorithm + +### Fixed +- Fix missing json_row errors when using Mute plugin + + +## ZeroNet 0.5.2 (2017-02-09) +### Added +- User muting +- Win/Mac signed exe/.app +- Signed commits + +### Changed +- Faster site updates after startup +- New macOS package for 10.10 compatibility + +### Fixed +- Fix "New version just released" popup on page first visit +- Fix disappearing optional files bug (Thanks l5h5t7 for reporting) +- Fix skipped updates on unreliable connections (Thanks P2P for reporting) +- Sandbox escape security fix (Thanks Firebox for reporting) +- Fix error reporting on async websocket functions + + +## ZeroNet 0.5.1 (2016-11-18) +### Added +- Multi language interface +- New plugin: Translation helper for site html and js files +- Per-site favicon + +### Fixed +- Parallel optional file downloading + + +## ZeroNet 0.5.0 (2016-11-08) +### Added +- New Plugin: Allow list/delete/pin/manage files on ZeroHello +- New API commands to follow user's optional files, and query stats for optional files +- Set total size limit on optional files. +- New Plugin: Save peers to database and keep them between restarts to allow more faster optional file search and make it work without trackers +- Rewritten uPnP port opener + close port on exit (Thanks to sirMackk!) 
+- Lower memory usage by lazy PeerHashfield creation +- Loaded json files statistics and database info at /Stats page + +### Changed +- Separate lock file for better Windows compatibility +- When executing start.py open browser even if ZeroNet is already running +- Keep plugin order after reload to allow plugins to extends an another plug-in +- Only save sites.json if fully loaded to avoid data loss +- Change aletorrenty tracker to a more reliable one +- Much lower findhashid CPU usage +- Pooled downloading of large amount of optional files +- Lots of other optional file changes to make it better +- If we have 1000 peers for a site make cleanup more aggressive +- Use warning instead of error on verification errors +- Push updates to newer clients first +- Bad file reset improvements + +### Fixed +- Fix site deletion errors on startup +- Delay websocket messages until it's connected +- Fix database import if data file contains extra data +- Fix big site download +- Fix diff sending bug (been chasing it for a long time) +- Fix random publish errors when json file contained [] characters +- Fix site delete and siteCreate bug +- Fix file write confirmation dialog + + +## ZeroNet 0.4.1 (2016-09-05) +### Added +- Major core changes to allow fast startup and lower memory usage +- Try to reconnect to Tor on lost connection +- Sidebar fade-in +- Try to avoid incomplete data files overwrite +- Faster database open +- Display user file sizes in sidebar +- Concurrent worker number depends on --connection_limit + +### Changed +- Close databases after 5 min idle time +- Better site size calculation +- Allow "-" character in domains +- Always try to keep connections for sites +- Remove merger permission from merged sites +- Newsfeed scans only last 3 days to speed up database queries +- Updated ZeroBundle-win to Python 2.7.12 + +### Fixed +- Fix for important security problem, which is allowed anyone to publish new content without valid certificate from ID provider. 
Thanks Kaffie for pointing it out! +- Fix sidebar error when no certificate provider selected +- Skip invalid files on database rebuilding +- Fix random websocket connection error popups +- Fix new siteCreate command +- Fix site size calculation +- Fix port open checking after computer wake up +- Fix --size_limit parsing from command line + + +## ZeroNet 0.4.0 (2016-08-11) +### Added +- Merger site plugin +- Live source code reloading: Faster core development by allowing me to make changes in ZeroNet source code without restarting it. +- New json table format for merger sites +- Database rebuild from sidebar. +- Allow to store custom data directly in json table: Much simpler and faster SQL queries. +- User file archiving: Allows the site owner to archive inactive user's content into single file. (Reducing initial sync time/cpu/memory usage) +- Also trigger onUpdated/update database on file delete. +- Permission request from ZeroFrame API. +- Allow to store extra data in content.json using fileWrite API command. 
+- Faster optional files downloading +- Use alternative sources (Gogs, Gitlab) to download updates +- Track provided sites/connection and prefer to keep the ones with more sites to reduce connection number + +### Changed +- Keep at least 5 connection per site +- Changed target connection for sites to 10 from 15 +- ZeroHello search function stability/speed improvements +- Improvements for clients with slower HDD + +### Fixed +- Fix IE11 wrapper nonce errors +- Fix sidebar on mobile devices +- Fix site size calculation +- Fix IE10 compatibility +- Windows XP ZeroBundle compatibility (THX to people of China) + + +## ZeroNet 0.3.7 (2016-05-27) +### Changed +- Patch command to reduce bandwidth usage by transfer only the changed lines +- Other cpu/memory optimizations + + +## ZeroNet 0.3.6 (2016-05-27) +### Added +- New ZeroHello +- Newsfeed function + +### Fixed +- Security fixes + + +## ZeroNet 0.3.5 (2016-02-02) +### Added +- Full Tor support with .onion hidden services +- Bootstrap using ZeroNet protocol + +### Fixed +- Fix Gevent 1.0.2 compatibility + + +## ZeroNet 0.3.4 (2015-12-28) +### Added +- AES, ECIES API function support +- PushState and ReplaceState url manipulation support in API +- Multiuser localstorage diff --git a/Dockerfile b/Dockerfile index 350fff9f3..7fcd83cae 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,32 +1,26 @@ -FROM ubuntu:16.04 - -MAINTAINER Felix Imobersteg +FROM alpine:3.8 #Base settings -ENV DEBIAN_FRONTEND noninteractive ENV HOME /root #Install ZeroNet -RUN \ - apt-get update -y; \ - apt-get -y install msgpack-python python-gevent python-pip python-dev tor; \ - pip install msgpack-python --upgrade; \ - apt-get clean -y; \ - rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*; \ - echo "ControlPort 9051" >> /etc/tor/torrc; \ - echo "CookieAuthentication 1" >> /etc/tor/torrc - +RUN apk --no-cache --no-progress add musl-dev gcc python python-dev py2-pip tor openssl \ + && pip install --no-cache-dir gevent msgpack \ + && apk del musl-dev gcc 
python-dev py2-pip \ + && echo "ControlPort 9051" >> /etc/tor/torrc \ + && echo "CookieAuthentication 1" >> /etc/tor/torrc #Add Zeronet source -ADD . /root +COPY . /root VOLUME /root/data #Control if Tor proxy is started ENV ENABLE_TOR false +WORKDIR /root + #Set upstart command -CMD cd /root && (! ${ENABLE_TOR} || /etc/init.d/tor start) && python zeronet.py --ui_ip 0.0.0.0 +CMD (! ${ENABLE_TOR} || tor&) && python zeronet.py --ui_ip 0.0.0.0 --fileserver_port 26552 #Expose ports -EXPOSE 43110 -EXPOSE 15441 +EXPOSE 43110 26552 diff --git a/README-ru.md b/README-ru.md new file mode 100644 index 000000000..75abbfab9 --- /dev/null +++ b/README-ru.md @@ -0,0 +1,211 @@ +# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=master)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.io/docs/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.io/docs/help_zeronet/donate/) + +[简体中文](./README-zh-cn.md) +[English](./README.md) + +Децентрализованные вебсайты использующие Bitcoin криптографию и BitTorrent сеть - https://zeronet.io + + +## Зачем? + +* Мы верим в открытую, свободную, и не отцензуренную сеть и коммуникацию. +* Нет единой точки отказа: Сайт онлайн пока по крайней мере 1 пир обслуживает его. +* Никаких затрат на хостинг: Сайты обслуживаются посетителями. +* Невозможно отключить: Он нигде, потому что он везде. +* Быстр и работает оффлайн: Вы можете получить доступ к сайту, даже если Интернет недоступен. 
+ + +## Особенности + * Обновляемые в реальном времени сайты + * Поддержка Namecoin .bit доменов + * Лёгок в установке: распаковал & запустил + * Клонирование вебсайтов в один клик + * Password-less [BIP32](https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) + based authorization: Ваша учетная запись защищена той же криптографией, что и ваш Bitcoin-кошелек + * Встроенный SQL-сервер с синхронизацией данных P2P: Позволяет упростить разработку сайта и ускорить загрузку страницы + * Анонимность: Полная поддержка сети Tor с помощью скрытых служб .onion вместо адресов IPv4 + * TLS зашифрованные связи + * Автоматическое открытие uPnP порта + * Плагин для поддержки многопользовательской (openproxy) + * Работает с любыми браузерами и операционными системами + + +## Как это работает? + +* После запуска `zeronet.py` вы сможете посетить зайты (zeronet сайты) используя адрес + `http://127.0.0.1:43110/{zeronet_address}` +(например. `http://127.0.0.1:43110/1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D`). +* Когда вы посещаете новый сайт zeronet, он пытается найти пиров с помощью BitTorrent + чтобы загрузить файлы сайтов (html, css, js ...) из них. +* Каждый посещенный зайт также обслуживается вами. (Т.е хранится у вас на компьютере) +* Каждый сайт содержит файл `content.json`, который содержит все остальные файлы в хэше sha512 + и подпись, созданную с использованием частного ключа сайта. +* Если владелец сайта (у которого есть закрытый ключ для адреса сайта) изменяет сайт, то он/она + подписывает новый `content.json` и публикует его для пиров. После этого пиры проверяют целостность `content.json` + (используя подпись), они загружают измененные файлы и публикуют новый контент для других пиров. 
+ +#### [Слайд-шоу о криптографии ZeroNet, обновлениях сайтов, многопользовательских сайтах »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) +#### [Часто задаваемые вопросы »](https://zeronet.io/docs/faq/) + +#### [Документация разработчика ZeroNet »](https://zeronet.io/docs/site_development/getting_started/) + + +## Скриншоты + +![Screenshot](https://i.imgur.com/H60OAHY.png) +![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png) + +#### [Больше скриншотов в ZeroNet документации »](https://zeronet.io/docs/using_zeronet/sample_sites/) + + +## Как вступить + +* Скачайте ZeroBundle пакет: + * [Microsoft Windows](https://github.com/HelloZeroNet/ZeroNet-win/archive/dist/ZeroNet-win.zip) + * [Apple macOS](https://github.com/HelloZeroNet/ZeroNet-mac/archive/dist/ZeroNet-mac.zip) + * [Linux 64-bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz) + * [Linux 32-bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux32.tar.gz) +* Распакуйте где угодно +* Запустите `ZeroNet.exe` (win), `ZeroNet(.app)` (osx), `ZeroNet.sh` (linux) + +### Linux терминал + +* `wget https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz` +* `tar xvpfz ZeroBundle-linux64.tar.gz` +* `cd ZeroBundle` +* Запустите с помощью `./ZeroNet.sh` + +Он загружает последнюю версию ZeroNet, затем запускает её автоматически. + +#### Ручная установка для Debian Linux + +* `sudo apt-get update` +* `sudo apt-get install msgpack-python python-gevent` +* `wget https://github.com/HelloZeroNet/ZeroNet/archive/master.tar.gz` +* `tar xvpfz master.tar.gz` +* `cd ZeroNet-master` +* Запустите с помощью `python2 zeronet.py` +* Откройте http://127.0.0.1:43110/ в вашем браузере. 
+ +### [Arch Linux](https://www.archlinux.org) + +* `git clone https://aur.archlinux.org/zeronet.git` +* `cd zeronet` +* `makepkg -srci` +* `systemctl start zeronet` +* Откройте http://127.0.0.1:43110/ в вашем браузере. + +Смотрите [ArchWiki](https://wiki.archlinux.org)'s [ZeroNet +article](https://wiki.archlinux.org/index.php/ZeroNet) для дальнейшей помощи. + +### [Gentoo Linux](https://www.gentoo.org) + +* [`layman -a raiagent`](https://github.com/leycec/raiagent) +* `echo '>=net-vpn/zeronet-0.5.4' >> /etc/portage/package.accept_keywords` +* *(Опционально)* Включить поддержку Tor: `echo 'net-vpn/zeronet tor' >> + /etc/portage/package.use` +* `emerge zeronet` +* `rc-service zeronet start` +* Откройте http://127.0.0.1:43110/ в вашем браузере. + +Смотрите `/usr/share/doc/zeronet-*/README.gentoo.bz2` для дальнейшей помощи. + +### [FreeBSD](https://www.freebsd.org/) + +* `pkg install zeronet` or `cd /usr/ports/security/zeronet/ && make install clean` +* `sysrc zeronet_enable="YES"` +* `service zeronet start` +* Откройте http://127.0.0.1:43110/ в вашем браузере. + +### [Vagrant](https://www.vagrantup.com/) + +* `vagrant up` +* Подключитесь к VM с помощью `vagrant ssh` +* `cd /vagrant` +* Запустите `python2 zeronet.py --ui_ip 0.0.0.0` +* Откройте http://127.0.0.1:43110/ в вашем браузере. + +### [Docker](https://www.docker.com/) +* `docker run -d -v :/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 nofish/zeronet` +* Это изображение Docker включает в себя прокси-сервер Tor, который по умолчанию отключён. + Остерегайтесь что некоторые хостинг-провайдеры могут не позволить вам запускать Tor на своих серверах. + Если вы хотите включить его,установите переменную среды `ENABLE_TOR` в` true` (по умолчанию: `false`) Например: + + `docker run -d -e "ENABLE_TOR=true" -v :/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 nofish/zeronet` +* Откройте http://127.0.0.1:43110/ в вашем браузере. 
+
+### [Virtualenv](https://virtualenv.readthedocs.org/en/latest/)
+
+* `virtualenv env`
+* `source env/bin/activate`
+* `pip install msgpack gevent`
+* `python2 zeronet.py`
+* Откройте http://127.0.0.1:43110/ в вашем браузере.
+
+## Текущие ограничения
+
+* ~~Нет torrent-подобного разделения файлов для поддержки больших файлов~~ (поддержка больших файлов добавлена)
+* ~~Не анонимнее, чем Bittorrent~~ (добавлена встроенная поддержка Tor)
+* Файловые транзакции не сжаты ~~или не зашифрованы~~ (добавлено шифрование TLS)
+* Нет приватных сайтов
+
+
+## Как я могу создать сайт в ZeroNet?
+
+Завершите работу zeronet, если он запущен
+
+```bash
+$ zeronet.py siteCreate
+...
+- Site private key (Приватный ключ сайта): 23DKQpzxhbVBrAtvLEc2uvk7DZweh4qL3fn3jpM3LgHDczMK2TtYUq
+- Site address (Адрес сайта): 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2
+...
+- Site created! (Сайт создан)
+$ zeronet.py
+...
+```
+
+Поздравляем, вы закончили! Теперь каждый может получить доступ к вашему сайту, используя
+`http://localhost:43110/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2`
+
+Следующие шаги: [ZeroNet Developer Documentation](https://zeronet.io/docs/site_development/getting_started/)
+
+
+## Как я могу модифицировать сайт ZeroNet?
+
+* Измените файлы, расположенные в директории data/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2.
+  Когда закончите с изменением:
+
+```bash
+$ zeronet.py siteSign 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2
+- Signing site (Подпись сайта): 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2...
+Private key (Приватный ключ) (input hidden):
+```
+
+* Введите секретный ключ, который вы получили при создании сайта, потом:
+
+```bash
+$ zeronet.py sitePublish 13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2
+...
+Site:13DNDk..bhC2 Publishing to 3/10 peers...
+Site:13DNDk..bhC2 Successfuly published to 3 peers
+- Serving files....
+```
+
+* Вот и всё! Вы успешно подписали и опубликовали свои изменения.
+ + +## Поддержите проект + +- Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX +- Paypal: https://zeronet.io/docs/help_zeronet/donate/ + +### Спонсоры + +* Улучшенная совместимость с MacOS / Safari стала возможной благодаря [BrowserStack.com](https://www.browserstack.com) + +#### Спасибо! + +* Больше информации, помощь, журнал изменений, zeronet сайты: https://www.reddit.com/r/zeronet/ +* Приходите, пообщайтесь с нами: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) или на [gitter](https://gitter.im/HelloZeroNet/ZeroNet) +* Email: hello@zeronet.io (PGP: CB9613AE) diff --git a/README-zh-cn.md b/README-zh-cn.md index 5bd627718..103194ea3 100644 --- a/README-zh-cn.md +++ b/README-zh-cn.md @@ -1,4 +1,4 @@ -# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=master)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.readthedocs.org/en/latest/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.readthedocs.org/en/latest/help_zeronet/donate/) +# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=master)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.io/docs/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.io/docs/help_zeronet/donate/) [English](./README.md) @@ -42,33 +42,29 @@ * 如果站点的所有者 (拥有私钥的那个人) 修改了站点, 并且他/她签名了新的 `content.json` 然后推送至其他节点, 那么所有节点将会在验证 `content.json` 的真实性 (使用签名)后, 下载修改后的文件并推送至其他节点。 -#### [有关于 ZeroNet 加密, 站点更新, 多用户站点的幻灯片 »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) -#### [常见问题 »](https://zeronet.readthedocs.org/en/latest/faq/) +#### [有关于 ZeroNet 加密, 站点更新, 多用户站点的幻灯片 
»](https://docs.google.com/presentation/d/1qBxkroB_iiX2zHEn0dt-N-qRZgyEzui46XS2hEa3AA4/pub?start=false&loop=false&delayms=3000) +#### [常见问题 »](https://zeronet.io/docs/faq/) -#### [ZeroNet开发者文档 »](https://zeronet.readthedocs.org/en/latest/site_development/getting_started/) +#### [ZeroNet开发者文档 »](https://zeronet.io/docs/site_development/getting_started/) ## 屏幕截图 ![Screenshot](https://i.imgur.com/H60OAHY.png) -![ZeroTalk](https://zeronet.readthedocs.org/en/latest/img/zerotalk.png) +![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png) -#### [在 ZeroNet 文档里查看更多的屏幕截图 »](https://zeronet.readthedocs.org/en/latest/using_zeronet/sample_sites/) +#### [在 ZeroNet 文档里查看更多的屏幕截图 »](https://zeronet.io/docs/using_zeronet/sample_sites/) ## 如何加入 ? * 下载 ZeroBundle 文件包: - * [Microsoft Windows](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-win.zip) - * [Apple OS X](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-mac-osx.zip) + * [Microsoft Windows](https://github.com/HelloZeroNet/ZeroNet-win/archive/dist/ZeroNet-win.zip) + * [Apple macOS](https://github.com/HelloZeroNet/ZeroNet-mac/archive/dist/ZeroNet-mac.zip) * [Linux 64bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz) * [Linux 32bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux32.tar.gz) * 解压缩 -* 运行 `ZeroNet.cmd` (win), `ZeroNet(.app)` (osx), `ZeroNet.sh` (linux) - -如果你在 OS X 上遇到了 "classic environment no longer supported" 错误,请打开一个终端然后把 ZeroNet.app 拖进去 - -在你打开时他将会自动下载最新版本的 ZeroNet 。 +* 运行 `ZeroNet.exe` (win), `ZeroNet(.app)` (osx), `ZeroNet.sh` (linux) ### Linux 命令行 @@ -86,37 +82,43 @@ * `wget https://github.com/HelloZeroNet/ZeroNet/archive/master.tar.gz` * `tar xvpfz master.tar.gz` * `cd ZeroNet-master` -* 执行 `python zeronet.py` 来启动 +* 执行 `python2 zeronet.py` 来启动 * 在你的浏览器中打开 http://127.0.0.1:43110/ +### [FreeBSD](https://www.freebsd.org/) + +* `pkg install zeronet` 或者 `cd /usr/ports/security/zeronet/ && make install 
clean` +* `sysrc zeronet_enable="YES"` +* `service zeronet start` +* 在你的浏览器中打开 http://127.0.0.1:43110/ ### [Vagrant](https://www.vagrantup.com/) * `vagrant up` * 通过 `vagrant ssh` 连接到 VM * `cd /vagrant` -* 运行 `python zeronet.py --ui_ip 0.0.0.0` +* 运行 `python2 zeronet.py --ui_ip 0.0.0.0` * 在你的浏览器中打开 http://127.0.0.1:43110/ ### [Docker](https://www.docker.com/) -* `docker run -d -v :/root/data -p 15441:15441 -p 43110:43110 nofish/zeronet` +* `docker run -d -v :/root/data -p 26552:26552 -p 43110:43110 nofish/zeronet` * 这个 Docker 镜像包含了 Tor ,但默认是禁用的,因为一些托管商不允许你在他们的服务器上运行 Tor。如果你希望启用它, 设置 `ENABLE_TOR` 环境变量为 `true` (默认: `false`). E.g.: - `docker run -d -e "ENABLE_TOR=true" -v :/root/data -p 15441:15441 -p 43110:43110 nofish/zeronet` + `docker run -d -e "ENABLE_TOR=true" -v :/root/data -p 26552:26552 -p 43110:43110 nofish/zeronet` * 在你的浏览器中打开 http://127.0.0.1:43110/ ### [Virtualenv](https://virtualenv.readthedocs.org/en/latest/) * `virtualenv env` * `source env/bin/activate` -* `pip install msgpack-python gevent` -* `python zeronet.py` +* `pip install msgpack gevent` +* `python2 zeronet.py` * 在你的浏览器中打开 http://127.0.0.1:43110/ ## 现有限制 -* 没有类似于 BitTorrent 的文件拆分来支持大文件 +* ~~没有类似于 BitTorrent 的文件拆分来支持大文件~~ (已添加大文件支持) * ~~没有比 BitTorrent 更好的匿名性~~ (已添加内置的完整 Tor 支持) * 传输文件时没有压缩~~和加密~~ (已添加 TLS 支持) * 不支持私有站点 @@ -142,7 +144,7 @@ $ zeronet.py `http://localhost:43110/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2` 来访问你的站点 -下一步: [ZeroNet 开发者文档](https://zeronet.readthedocs.org/en/latest/site_development/getting_started/) +下一步: [ZeroNet 开发者文档](https://zeronet.io/docs/site_development/getting_started/) ## 我要如何修改 ZeroNet 站点? 
@@ -172,8 +174,7 @@ Site:13DNDk..bhC2 Successfuly published to 3 peers ## 帮助这个项目 - Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX -- Paypal: https://zeronet.readthedocs.org/en/latest/help_zeronet/donate/ -- Gratipay: https://gratipay.com/zeronet/ +- Paypal: https://zeronet.io/docs/help_zeronet/donate/ ### 赞助商 diff --git a/README.md b/README.md index 069ab3589..07d09ddb6 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ -# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=master)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.readthedocs.org/en/latest/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.readthedocs.org/en/latest/help_zeronet/donate/) +# ZeroNet [![Build Status](https://travis-ci.org/HelloZeroNet/ZeroNet.svg?branch=master)](https://travis-ci.org/HelloZeroNet/ZeroNet) [![Documentation](https://img.shields.io/badge/docs-faq-brightgreen.svg)](https://zeronet.io/docs/faq/) [![Help](https://img.shields.io/badge/keep_this_project_alive-donate-yellow.svg)](https://zeronet.io/docs/help_zeronet/donate/) [简体中文](./README-zh-cn.md) +[Русский](./README-ru.md) Decentralized websites using Bitcoin crypto and the BitTorrent network - https://zeronet.io @@ -48,34 +49,30 @@ Decentralized websites using Bitcoin crypto and the BitTorrent network - https:/ other peers. 
#### [Slideshow about ZeroNet cryptography, site updates, multi-user sites »](https://docs.google.com/presentation/d/1_2qK1IuOKJ51pgBvllZ9Yu7Au2l551t3XBgyTSvilew/pub?start=false&loop=false&delayms=3000) -#### [Frequently asked questions »](https://zeronet.readthedocs.org/en/latest/faq/) +#### [Frequently asked questions »](https://zeronet.io/docs/faq/) -#### [ZeroNet Developer Documentation »](https://zeronet.readthedocs.org/en/latest/site_development/getting_started/) +#### [ZeroNet Developer Documentation »](https://zeronet.io/docs/site_development/getting_started/) ## Screenshots ![Screenshot](https://i.imgur.com/H60OAHY.png) -![ZeroTalk](https://zeronet.readthedocs.org/en/latest/img/zerotalk.png) +![ZeroTalk](https://zeronet.io/docs/img/zerotalk.png) -#### [More screenshots in ZeroNet docs »](https://zeronet.readthedocs.org/en/latest/using_zeronet/sample_sites/) +#### [More screenshots in ZeroNet docs »](https://zeronet.io/docs/using_zeronet/sample_sites/) ## How to join * Download ZeroBundle package: - * [Microsoft Windows](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-win.zip) - * [Apple OS X](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-mac-osx.zip) - * [Linux 64bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz) - * [Linux 32bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux32.tar.gz) + * [Microsoft Windows](https://github.com/HelloZeroNet/ZeroNet-win/archive/dist/ZeroNet-win.zip) + * [Apple macOS](https://github.com/HelloZeroNet/ZeroNet-mac/archive/dist/ZeroNet-mac.zip) + * [Linux x86/64-bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz) + * [Linux x86/32-bit](https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux32.tar.gz) * Unpack anywhere -* Run `ZeroNet.cmd` (win), `ZeroNet(.app)` (osx), `ZeroNet.sh` (linux) +* Run `ZeroNet.exe` (win), `ZeroNet(.app)` (osx), `ZeroNet.sh` 
(linux) -If you get "classic environment no longer supported" error on OS X: Open a Terminal window and drop ZeroNet.app on it - -It downloads the latest version of ZeroNet then starts it automatically. - -### Linux terminal +### Linux terminal on x86-64 * `wget https://github.com/HelloZeroNet/ZeroBundle/raw/master/dist/ZeroBundle-linux64.tar.gz` * `tar xvpfz ZeroBundle-linux64.tar.gz` @@ -91,38 +88,73 @@ It downloads the latest version of ZeroNet then starts it automatically. * `wget https://github.com/HelloZeroNet/ZeroNet/archive/master.tar.gz` * `tar xvpfz master.tar.gz` * `cd ZeroNet-master` -* Start with `python zeronet.py` +* Start with `python2 zeronet.py` +* Open http://127.0.0.1:43110/ in your browser + +### [Whonix](https://www.whonix.org) + +* [Instructions](https://www.whonix.org/wiki/ZeroNet) + +### [Arch Linux](https://www.archlinux.org) + +* `git clone https://aur.archlinux.org/zeronet.git` +* `cd zeronet` +* `makepkg -srci` +* `systemctl start zeronet` * Open http://127.0.0.1:43110/ in your browser +See [ArchWiki](https://wiki.archlinux.org)'s [ZeroNet +article](https://wiki.archlinux.org/index.php/ZeroNet) for further assistance. + +### [Gentoo Linux](https://www.gentoo.org) + +* [`eselect repository enable raiagent`](https://github.com/leycec/raiagent) +* `emerge --sync` +* `echo 'net-vpn/zeronet' >> /etc/portage/package.accept_keywords` +* *(Optional)* Enable Tor support: `echo 'net-vpn/zeronet tor' >> + /etc/portage/package.use` +* `emerge zeronet` +* `rc-service zeronet start` +* *(Optional)* Enable zeronet at runlevel "default": `rc-update add zeronet` +* Open http://127.0.0.1:43110/ in your browser + +See `/usr/share/doc/zeronet-*/README.gentoo.bz2` for further assistance. 
+ +### [FreeBSD](https://www.freebsd.org/) + +* `pkg install zeronet` or `cd /usr/ports/security/zeronet/ && make install clean` +* `sysrc zeronet_enable="YES"` +* `service zeronet start` +* Open http://127.0.0.1:43110/ in your browser ### [Vagrant](https://www.vagrantup.com/) * `vagrant up` * Access VM with `vagrant ssh` * `cd /vagrant` -* Run `python zeronet.py --ui_ip 0.0.0.0` +* Run `python2 zeronet.py --ui_ip 0.0.0.0` * Open http://127.0.0.1:43110/ in your browser ### [Docker](https://www.docker.com/) -* `docker run -d -v :/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 nofish/zeronet` +* `docker run -d -v :/root/data -p 26552:26552 -p 127.0.0.1:43110:43110 nofish/zeronet` * This Docker image includes the Tor proxy, which is disabled by default. Beware that some hosting providers may not allow you running Tor in their servers. If you want to enable it, set `ENABLE_TOR` environment variable to `true` (Default: `false`). E.g.: - `docker run -d -e "ENABLE_TOR=true" -v :/root/data -p 15441:15441 -p 127.0.0.1:43110:43110 nofish/zeronet` + `docker run -d -e "ENABLE_TOR=true" -v :/root/data -p 26552:26552 -p 127.0.0.1:43110:43110 nofish/zeronet` * Open http://127.0.0.1:43110/ in your browser ### [Virtualenv](https://virtualenv.readthedocs.org/en/latest/) * `virtualenv env` * `source env/bin/activate` -* `pip install msgpack-python gevent` -* `python zeronet.py` +* `pip install msgpack gevent` +* `python2 zeronet.py` * Open http://127.0.0.1:43110/ in your browser ## Current limitations -* No torrent-like file splitting for big file support +* ~~No torrent-like file splitting for big file support~~ (big file support added) * ~~No more anonymous than Bittorrent~~ (built-in full Tor support added) * File transactions are not compressed ~~or encrypted yet~~ (TLS encryption added) * No private sites @@ -146,7 +178,7 @@ $ zeronet.py Congratulations, you're finished! 
Now anyone can access your site using `http://localhost:43110/13DNDkMUExRf9Xa9ogwPKqp7zyHFEqbhC2` -Next steps: [ZeroNet Developer Documentation](https://zeronet.readthedocs.org/en/latest/site_development/getting_started/) +Next steps: [ZeroNet Developer Documentation](https://zeronet.io/docs/site_development/getting_started/) ## How can I modify a ZeroNet site? @@ -176,15 +208,14 @@ Site:13DNDk..bhC2 Successfuly published to 3 peers ## Help keep this project alive - Bitcoin: 1QDhxQ6PraUZa21ET5fYUCPgdrwBomnFgX -- Paypal: https://zeronet.readthedocs.org/en/latest/help_zeronet/donate/ -- Gratipay: https://gratipay.com/zeronet/ +- Paypal: https://zeronet.io/docs/help_zeronet/donate/ ### Sponsors -* Better OSX/Safari compatibility made possible by [BrowserStack.com](https://www.browserstack.com) +* Better macOS/Safari compatibility made possible by [BrowserStack.com](https://www.browserstack.com) #### Thank you! * More info, help, changelog, zeronet sites: https://www.reddit.com/r/zeronet/ * Come, chat with us: [#zeronet @ FreeNode](https://kiwiirc.com/client/irc.freenode.net/zeronet) or on [gitter](https://gitter.im/HelloZeroNet/ZeroNet) -* Email: hello@noloop.me +* Email: hello@zeronet.io (PGP: CB9613AE) diff --git a/Vagrantfile b/Vagrantfile index 6c4da8945..24fe0c45f 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -40,6 +40,6 @@ Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| config.vm.provision "shell", inline: "sudo apt-get install msgpack-python python-gevent python-pip python-dev -y" config.vm.provision "shell", - inline: "sudo pip install msgpack-python --upgrade" + inline: "sudo pip install msgpack --upgrade" end diff --git a/plugins/AnnounceLocal/AnnounceLocalPlugin.py b/plugins/AnnounceLocal/AnnounceLocalPlugin.py new file mode 100644 index 000000000..27b4d38a9 --- /dev/null +++ b/plugins/AnnounceLocal/AnnounceLocalPlugin.py @@ -0,0 +1,148 @@ +import time + +import gevent + +from Plugin import PluginManager +from Config import config +import 
BroadcastServer + + +@PluginManager.registerTo("SiteAnnouncer") +class SiteAnnouncerPlugin(object): + def announce(self, force=False, *args, **kwargs): + local_announcer = self.site.connection_server.local_announcer + + thread = None + if local_announcer and (force or time.time() - local_announcer.last_discover > 5 * 60): + thread = gevent.spawn(local_announcer.discover, force=force) + back = super(SiteAnnouncerPlugin, self).announce(force=force, *args, **kwargs) + + if thread: + thread.join() + + return back + + +class LocalAnnouncer(BroadcastServer.BroadcastServer): + def __init__(self, server, listen_port): + super(LocalAnnouncer, self).__init__("zeronet", listen_port=listen_port) + self.server = server + + self.sender_info["peer_id"] = self.server.peer_id + self.sender_info["port"] = self.server.port + self.sender_info["broadcast_port"] = listen_port + self.sender_info["rev"] = config.rev + + self.known_peers = {} + self.last_discover = 0 + + def discover(self, force=False): + self.log.debug("Sending discover request (force: %s)" % force) + self.last_discover = time.time() + if force: # Probably new site added, clean cache + self.known_peers = {} + + for peer_id, known_peer in self.known_peers.items(): + if time.time() - known_peer["found"] > 20 * 60: + del(self.known_peers[peer_id]) + self.log.debug("Timeout, removing from known_peers: %s" % peer_id) + self.broadcast({"cmd": "discoverRequest", "params": {}}, port=self.listen_port) + + def actionDiscoverRequest(self, sender, params): + back = { + "cmd": "discoverResponse", + "params": { + "sites_changed": self.server.site_manager.sites_changed + } + } + + if sender["peer_id"] not in self.known_peers: + self.known_peers[sender["peer_id"]] = {"added": time.time(), "sites_changed": 0, "updated": 0, "found": time.time()} + self.log.debug("Got discover request from unknown peer %s (%s), time to refresh known peers" % (sender["ip"], sender["peer_id"])) + gevent.spawn_later(1.0, self.discover) # Let the response 
arrive first to the requester + + return back + + def actionDiscoverResponse(self, sender, params): + if sender["peer_id"] in self.known_peers: + self.known_peers[sender["peer_id"]]["found"] = time.time() + if params["sites_changed"] != self.known_peers.get(sender["peer_id"], {}).get("sites_changed"): + # Peer's site list changed, request the list of new sites + return {"cmd": "siteListRequest"} + else: + # Peer's site list is the same + for site in self.server.sites.values(): + peer = site.peers.get("%s:%s" % (sender["ip"], sender["port"])) + if peer: + peer.found("local") + + def actionSiteListRequest(self, sender, params): + back = [] + sites = self.server.sites.values() + + # Split adresses to group of 100 to avoid UDP size limit + site_groups = [sites[i:i + 100] for i in range(0, len(sites), 100)] + for site_group in site_groups: + res = {} + res["sites_changed"] = self.server.site_manager.sites_changed + res["sites"] = [site.address_hash for site in site_group] + back.append({"cmd": "siteListResponse", "params": res}) + return back + + def actionSiteListResponse(self, sender, params): + s = time.time() + peer_sites = set(params["sites"]) + num_found = 0 + added_sites = [] + for site in self.server.sites.values(): + if site.address_hash in peer_sites: + added = site.addPeer(sender["ip"], sender["port"], source="local") + num_found += 1 + if added: + site.worker_manager.onPeers() + site.updateWebsocket(peers_added=1) + added_sites.append(site) + + # Save sites changed value to avoid unnecessary site list download + if sender["peer_id"] not in self.known_peers: + self.known_peers[sender["peer_id"]] = {"added": time.time()} + + self.known_peers[sender["peer_id"]]["sites_changed"] = params["sites_changed"] + self.known_peers[sender["peer_id"]]["updated"] = time.time() + self.known_peers[sender["peer_id"]]["found"] = time.time() + + self.log.debug( + "Tracker result: Discover from %s response parsed in %.3fs, found: %s added: %s of %s" % + (sender["ip"], 
time.time() - s, num_found, added_sites, len(peer_sites)) + ) + + +@PluginManager.registerTo("FileServer") +class FileServerPlugin(object): + def __init__(self, *args, **kwargs): + res = super(FileServerPlugin, self).__init__(*args, **kwargs) + if config.broadcast_port and config.tor != "always" and not config.disable_udp: + self.local_announcer = LocalAnnouncer(self, config.broadcast_port) + else: + self.local_announcer = None + return res + + def start(self, *args, **kwargs): + if self.local_announcer: + gevent.spawn(self.local_announcer.start) + return super(FileServerPlugin, self).start(*args, **kwargs) + + def stop(self): + if self.local_announcer: + self.local_announcer.stop() + res = super(FileServerPlugin, self).stop() + return res + + +@PluginManager.registerTo("ConfigPlugin") +class ConfigPlugin(object): + def createArguments(self): + group = self.parser.add_argument_group("AnnounceLocal plugin") + group.add_argument('--broadcast_port', help='UDP broadcasting port for local peer discovery', default=1544, type=int, metavar='port') + + return super(ConfigPlugin, self).createArguments() diff --git a/plugins/AnnounceLocal/BroadcastServer.py b/plugins/AnnounceLocal/BroadcastServer.py new file mode 100644 index 000000000..5863ad05c --- /dev/null +++ b/plugins/AnnounceLocal/BroadcastServer.py @@ -0,0 +1,140 @@ +import socket +import logging +import time +from contextlib import closing + +import msgpack + +from Debug import Debug +from util import UpnpPunch + + +class BroadcastServer(object): + def __init__(self, service_name, listen_port=1544, listen_ip=''): + self.log = logging.getLogger("BroadcastServer") + self.listen_port = listen_port + self.listen_ip = listen_ip + + self.running = False + self.sock = None + self.sender_info = {"service": service_name} + + def createBroadcastSocket(self): + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) + sock.setsockopt(socket.SOL_SOCKET, 
socket.SO_REUSEADDR, 1) + if hasattr(socket, 'SO_REUSEPORT'): + try: + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + except Exception as err: + self.log.warning("Error setting SO_REUSEPORT: %s" % err) + + binded = False + for retry in range(3): + try: + sock.bind((self.listen_ip, self.listen_port)) + binded = True + break + except Exception as err: + self.log.error( + "Socket bind to %s:%s error: %s, retry #%s" % + (self.listen_ip, self.listen_port, Debug.formatException(err), retry) + ) + time.sleep(retry) + + if binded: + return sock + else: + return False + + def start(self): # Listens for discover requests + self.sock = self.createBroadcastSocket() + if not self.sock: + self.log.error("Unable to listen on port %s" % self.listen_port) + return + + self.log.debug("Started on port %s" % self.listen_port) + + self.running = True + + while self.running: + try: + data, addr = self.sock.recvfrom(8192) + except Exception as err: + if self.running: + self.log.error("Listener receive error: %s" % err) + continue + + if not self.running: + break + + try: + message = msgpack.unpackb(data) + response_addr, message = self.handleMessage(addr, message) + if message: + self.send(response_addr, message) + except Exception as err: + self.log.error("Handlemessage error: %s" % Debug.formatException(err)) + self.log.debug("Stopped listening on port %s" % self.listen_port) + + def stop(self): + self.log.debug("Stopping, socket: %s" % self.sock) + self.running = False + if self.sock: + self.sock.close() + + def send(self, addr, message): + if type(message) is not list: + message = [message] + + for message_part in message: + message_part["sender"] = self.sender_info + + self.log.debug("Send to %s: %s" % (addr, message_part["cmd"])) + with closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as sock: + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.sendto(msgpack.packb(message_part), addr) + + def getMyIps(self): + return UpnpPunch._get_local_ips() + 
+ def broadcast(self, message, port=None): + if not port: + port = self.listen_port + + my_ips = self.getMyIps() + addr = ("255.255.255.255", port) + + message["sender"] = self.sender_info + self.log.debug("Broadcast using ips %s on port %s: %s" % (my_ips, port, message["cmd"])) + + for my_ip in my_ips: + try: + with closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as sock: + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) + sock.bind((my_ip, 0)) + sock.sendto(msgpack.packb(message), addr) + except Exception as err: + self.log.warning("Error sending broadcast using ip %s: %s" % (my_ip, err)) + + def handleMessage(self, addr, message): + self.log.debug("Got from %s: %s" % (addr, message["cmd"])) + cmd = message["cmd"] + params = message.get("params", {}) + sender = message["sender"] + sender["ip"] = addr[0] + + func_name = "action" + cmd[0].upper() + cmd[1:] + func = getattr(self, func_name, None) + + if sender["service"] != "zeronet" or sender["peer_id"] == self.sender_info["peer_id"]: + # Skip messages not for us or sent by us + message = None + elif func: + message = func(sender, params) + else: + self.log.debug("Unknown cmd: %s" % cmd) + message = None + + return (sender["ip"], sender["broadcast_port"]), message diff --git a/plugins/AnnounceLocal/Test/TestAnnounce.py b/plugins/AnnounceLocal/Test/TestAnnounce.py new file mode 100644 index 000000000..691ecc266 --- /dev/null +++ b/plugins/AnnounceLocal/Test/TestAnnounce.py @@ -0,0 +1,113 @@ +import time +import copy + +import gevent +import pytest +import mock + +from AnnounceLocal import AnnounceLocalPlugin +from File import FileServer +from Test import Spy + +@pytest.fixture +def announcer(file_server, site): + file_server.sites[site.address] = site + announcer = AnnounceLocalPlugin.LocalAnnouncer(file_server, listen_port=1100) + file_server.local_announcer = announcer + announcer.listen_port = 1100 + 
announcer.sender_info["broadcast_port"] = 1100 + announcer.getMyIps = mock.MagicMock(return_value=["127.0.0.1"]) + announcer.discover = mock.MagicMock(return_value=False) # Don't send discover requests automatically + gevent.spawn(announcer.start) + time.sleep(0.5) + + assert file_server.local_announcer.running + return file_server.local_announcer + +@pytest.fixture +def announcer_remote(request, site_temp): + file_server_remote = FileServer("127.0.0.1", 1545) + file_server_remote.sites[site_temp.address] = site_temp + announcer = AnnounceLocalPlugin.LocalAnnouncer(file_server_remote, listen_port=1101) + file_server_remote.local_announcer = announcer + announcer.listen_port = 1101 + announcer.sender_info["broadcast_port"] = 1101 + announcer.getMyIps = mock.MagicMock(return_value=["127.0.0.1"]) + announcer.discover = mock.MagicMock(return_value=False) # Don't send discover requests automatically + gevent.spawn(announcer.start) + time.sleep(0.5) + + assert file_server_remote.local_announcer.running + + def cleanup(): + file_server_remote.stop() + request.addfinalizer(cleanup) + + + return file_server_remote.local_announcer + +@pytest.mark.usefixtures("resetSettings") +@pytest.mark.usefixtures("resetTempSettings") +class TestAnnounce: + def testSenderInfo(self, announcer): + sender_info = announcer.sender_info + assert sender_info["port"] > 0 + assert len(sender_info["peer_id"]) == 20 + assert sender_info["rev"] > 0 + + def testIgnoreSelfMessages(self, announcer): + # No response to messages that has same peer_id as server + assert not announcer.handleMessage(("0.0.0.0", 123), {"cmd": "discoverRequest", "sender": announcer.sender_info, "params": {}})[1] + + # Response to messages with different peer id + sender_info = copy.copy(announcer.sender_info) + sender_info["peer_id"] += "-" + addr, res = announcer.handleMessage(("0.0.0.0", 123), {"cmd": "discoverRequest", "sender": sender_info, "params": {}}) + assert res["params"]["sites_changed"] > 0 + + def 
testDiscoverRequest(self, announcer, announcer_remote): + assert len(announcer_remote.known_peers) == 0 + with Spy.Spy(announcer_remote, "handleMessage") as responses: + announcer_remote.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer.listen_port) + time.sleep(0.1) + + response_cmds = [response[1]["cmd"] for response in responses] + assert response_cmds == ["discoverResponse", "siteListResponse"] + assert len(responses[-1][1]["params"]["sites"]) == 1 + + # It should only request siteList if sites_changed value is different from last response + with Spy.Spy(announcer_remote, "handleMessage") as responses: + announcer_remote.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer.listen_port) + time.sleep(0.1) + + response_cmds = [response[1]["cmd"] for response in responses] + assert response_cmds == ["discoverResponse"] + + def testPeerDiscover(self, announcer, announcer_remote, site): + assert announcer.server.peer_id != announcer_remote.server.peer_id + assert len(announcer.server.sites.values()[0].peers) == 0 + announcer.broadcast({"cmd": "discoverRequest"}, port=announcer_remote.listen_port) + time.sleep(0.1) + assert len(announcer.server.sites.values()[0].peers) == 1 + + def testRecentPeerList(self, announcer, announcer_remote, site): + assert len(site.peers_recent) == 0 + assert len(site.peers) == 0 + with Spy.Spy(announcer, "handleMessage") as responses: + announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port) + time.sleep(0.1) + assert [response[1]["cmd"] for response in responses] == ["discoverResponse", "siteListResponse"] + assert len(site.peers_recent) == 1 + assert len(site.peers) == 1 + + # It should update peer without siteListResponse + last_time_found = site.peers.values()[0].time_found + site.peers_recent.clear() + with Spy.Spy(announcer, "handleMessage") as responses: + announcer.broadcast({"cmd": "discoverRequest", "params": {}}, port=announcer_remote.listen_port) + 
time.sleep(0.1) + assert [response[1]["cmd"] for response in responses] == ["discoverResponse"] + assert len(site.peers_recent) == 1 + assert site.peers.values()[0].time_found > last_time_found + + diff --git a/plugins/AnnounceLocal/Test/conftest.py b/plugins/AnnounceLocal/Test/conftest.py new file mode 100644 index 000000000..a88c642c7 --- /dev/null +++ b/plugins/AnnounceLocal/Test/conftest.py @@ -0,0 +1,4 @@ +from src.Test.conftest import * + +from Config import config +config.broadcast_port = 0 diff --git a/plugins/AnnounceLocal/Test/pytest.ini b/plugins/AnnounceLocal/Test/pytest.ini new file mode 100644 index 000000000..d09210d1d --- /dev/null +++ b/plugins/AnnounceLocal/Test/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +python_files = Test*.py +addopts = -rsxX -v --durations=6 +markers = + webtest: mark a test as a webtest. \ No newline at end of file diff --git a/plugins/AnnounceLocal/__init__.py b/plugins/AnnounceLocal/__init__.py new file mode 100644 index 000000000..defe24126 --- /dev/null +++ b/plugins/AnnounceLocal/__init__.py @@ -0,0 +1 @@ +import AnnounceLocalPlugin \ No newline at end of file diff --git a/plugins/AnnounceShare/AnnounceSharePlugin.py b/plugins/AnnounceShare/AnnounceSharePlugin.py new file mode 100644 index 000000000..10e3a3e60 --- /dev/null +++ b/plugins/AnnounceShare/AnnounceSharePlugin.py @@ -0,0 +1,188 @@ +import time +import os +import logging +import json +import atexit + +import gevent + +from Config import config +from Plugin import PluginManager +from util import helper + + +class TrackerStorage(object): + def __init__(self): + self.log = logging.getLogger("TrackerStorage") + self.file_path = "%s/trackers.json" % config.data_dir + self.load() + self.time_discover = 0.0 + atexit.register(self.save) + + def getDefaultFile(self): + return {"shared": {}} + + def onTrackerFound(self, tracker_address, type="shared", my=False): + if not tracker_address.startswith("zero://"): + return False + + trackers = self.getTrackers() + added = False + 
if tracker_address not in trackers: + trackers[tracker_address] = { + "time_added": time.time(), + "time_success": 0, + "latency": 99.0, + "num_error": 0, + "my": False + } + self.log.debug("New tracker found: %s" % tracker_address) + added = True + + trackers[tracker_address]["time_found"] = time.time() + trackers[tracker_address]["my"] = my + return added + + def onTrackerSuccess(self, tracker_address, latency): + trackers = self.getTrackers() + if tracker_address not in trackers: + return False + + trackers[tracker_address]["latency"] = latency + trackers[tracker_address]["time_success"] = time.time() + trackers[tracker_address]["num_error"] = 0 + + def onTrackerError(self, tracker_address): + trackers = self.getTrackers() + if tracker_address not in trackers: + return False + + trackers[tracker_address]["time_error"] = time.time() + trackers[tracker_address]["num_error"] += 1 + + if len(self.getWorkingTrackers()) >= config.working_shared_trackers_limit: + error_limit = 5 + else: + error_limit = 30 + error_limit + + if trackers[tracker_address]["num_error"] > error_limit and trackers[tracker_address]["time_success"] < time.time() - 60 * 60: + self.log.debug("Tracker %s looks down, removing." 
% tracker_address) + del trackers[tracker_address] + + def getTrackers(self, type="shared"): + return self.file_content.setdefault(type, {}) + + def getWorkingTrackers(self, type="shared"): + trackers = { + key: tracker for key, tracker in self.getTrackers(type).iteritems() + if tracker["time_success"] > time.time() - 60 * 60 + } + return trackers + + def getFileContent(self): + if not os.path.isfile(self.file_path): + open(self.file_path, "w").write("{}") + return self.getDefaultFile() + try: + return json.load(open(self.file_path)) + except Exception as err: + self.log.error("Error loading trackers list: %s" % err) + return self.getDefaultFile() + + def load(self): + self.file_content = self.getFileContent() + + trackers = self.getTrackers() + self.log.debug("Loaded %s shared trackers" % len(trackers)) + for address, tracker in trackers.items(): + tracker["num_error"] = 0 + if not address.startswith("zero://"): + del trackers[address] + + def save(self): + s = time.time() + helper.atomicWrite(self.file_path, json.dumps(self.file_content, indent=2, sort_keys=True)) + self.log.debug("Saved in %.3fs" % (time.time() - s)) + + def discoverTrackers(self, peers): + if len(self.getWorkingTrackers()) > config.working_shared_trackers_limit: + return False + s = time.time() + num_success = 0 + for peer in peers: + if peer.connection and peer.connection.handshake.get("rev", 0) < 3560: + continue # Not supported + + res = peer.request("getTrackers") + if not res or "error" in res: + continue + + num_success += 1 + for tracker_address in res["trackers"]: + added = self.onTrackerFound(tracker_address) + if added: # Only add one tracker from one source + break + + if not num_success and len(peers) < 20: + self.time_discover = 0.0 + + if num_success: + self.save() + + self.log.debug("Trackers discovered from %s/%s peers in %.3fs" % (num_success, len(peers), time.time() - s)) + + +if "tracker_storage" not in locals(): + tracker_storage = TrackerStorage() + + 
# Plugin hooks: merge shared trackers into announces, serve our working
# tracker list to peers, self-register as a tracker when our port is open,
# and expose the config option.


@PluginManager.registerTo("SiteAnnouncer")
class SiteAnnouncerPlugin(object):
    def getTrackers(self):
        # Kick off background tracker discovery at most every 5 minutes.
        if tracker_storage.time_discover < time.time() - 5 * 60:
            tracker_storage.time_discover = time.time()
            gevent.spawn(tracker_storage.discoverTrackers, self.site.getConnectedPeers())
        trackers = super(SiteAnnouncerPlugin, self).getTrackers()
        shared_trackers = tracker_storage.getTrackers("shared").keys()
        if shared_trackers:
            return trackers + shared_trackers
        else:
            return trackers

    def announceTracker(self, tracker, *args, **kwargs):
        # A truthy result is the measured latency; False means the announce failed.
        res = super(SiteAnnouncerPlugin, self).announceTracker(tracker, *args, **kwargs)
        if res:
            latency = res
            tracker_storage.onTrackerSuccess(tracker, latency)
        elif res is False:
            tracker_storage.onTrackerError(tracker)

        return res


@PluginManager.registerTo("FileRequest")
class FileRequestPlugin(object):
    def actionGetTrackers(self, params):
        # Only share trackers that announced successfully recently.
        shared_trackers = tracker_storage.getWorkingTrackers("shared").keys()
        self.response({"trackers": shared_trackers})


@PluginManager.registerTo("FileServer")
class FileServerPlugin(object):
    def portCheck(self, *args, **kwargs):
        res = super(FileServerPlugin, self).portCheck(*args, **kwargs)
        # If our port is open (and the Bootstrapper tracker plugin is loaded),
        # advertise ourselves as a zero:// tracker on every external IP.
        if res and config.tor != "always" and "Bootstrapper" in PluginManager.plugin_manager.plugin_names:
            for ip in self.ip_external_list:
                my_tracker_address = "zero://%s:%s" % (ip, config.fileserver_port)
                tracker_storage.onTrackerFound(my_tracker_address, my=True)
        return res


@PluginManager.registerTo("ConfigPlugin")
class ConfigPlugin(object):
    def createArguments(self):
        group = self.parser.add_argument_group("AnnounceShare plugin")
        group.add_argument('--working_shared_trackers_limit', help='Stop discovering new shared trackers after this number of shared trackers reached', default=5, type=int, metavar='limit')

        return super(ConfigPlugin, self).createArguments()
b/plugins/AnnounceShare/Test/TestAnnounceShare.py new file mode 100644 index 000000000..4608eda7c --- /dev/null +++ b/plugins/AnnounceShare/Test/TestAnnounceShare.py @@ -0,0 +1,25 @@ +import pytest + +from AnnounceShare import AnnounceSharePlugin +from Peer import Peer +from Config import config + + +@pytest.mark.usefixtures("resetSettings") +@pytest.mark.usefixtures("resetTempSettings") +class TestAnnounceShare: + def testAnnounceList(self, file_server): + open("%s/trackers.json" % config.data_dir, "w").write("{}") + tracker_storage = AnnounceSharePlugin.tracker_storage + tracker_storage.load() + print tracker_storage.file_path, config.data_dir + peer = Peer(file_server.ip, 1544, connection_server=file_server) + assert peer.request("getTrackers")["trackers"] == [] + + tracker_storage.onTrackerFound("zero://%s:15441" % file_server.ip) + assert peer.request("getTrackers")["trackers"] == [] + + # It needs to have at least one successfull announce to be shared to other peers + tracker_storage.onTrackerSuccess("zero://%s:15441" % file_server.ip, 1.0) + assert peer.request("getTrackers")["trackers"] == ["zero://%s:15441" % file_server.ip] + diff --git a/plugins/AnnounceShare/Test/conftest.py b/plugins/AnnounceShare/Test/conftest.py new file mode 100644 index 000000000..5abd4dd68 --- /dev/null +++ b/plugins/AnnounceShare/Test/conftest.py @@ -0,0 +1,3 @@ +from src.Test.conftest import * + +from Config import config diff --git a/plugins/AnnounceShare/Test/pytest.ini b/plugins/AnnounceShare/Test/pytest.ini new file mode 100644 index 000000000..d09210d1d --- /dev/null +++ b/plugins/AnnounceShare/Test/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +python_files = Test*.py +addopts = -rsxX -v --durations=6 +markers = + webtest: mark a test as a webtest. 
\ No newline at end of file diff --git a/plugins/AnnounceShare/__init__.py b/plugins/AnnounceShare/__init__.py new file mode 100644 index 000000000..f55cb2c66 --- /dev/null +++ b/plugins/AnnounceShare/__init__.py @@ -0,0 +1 @@ +import AnnounceSharePlugin diff --git a/plugins/AnnounceZero/AnnounceZeroPlugin.py b/plugins/AnnounceZero/AnnounceZeroPlugin.py index 14dba61e5..b7f9e8236 100644 --- a/plugins/AnnounceZero/AnnounceZeroPlugin.py +++ b/plugins/AnnounceZero/AnnounceZeroPlugin.py @@ -1,8 +1,7 @@ -import hashlib import time +import itertools from Plugin import PluginManager -from Peer import Peer from util import helper from Crypt import CryptRsa @@ -11,108 +10,129 @@ connection_pool = {} # Tracker address: Peer object +# We can only import plugin host clases after the plugins are loaded +@PluginManager.afterLoad +def importHostClasses(): + global Peer, AnnounceError + from Peer import Peer + from Site.SiteAnnouncer import AnnounceError + + # Process result got back from tracker -def processPeerRes(site, peers): +def processPeerRes(tracker_address, site, peers): added = 0 # Ip4 - found_ip4 = 0 - for packed_address in peers["ip4"]: - found_ip4 += 1 + found_ipv4 = 0 + peers_normal = itertools.chain(peers.get("ip4", []), peers.get("ipv4", []), peers.get("ipv6", [])) + for packed_address in peers_normal: + found_ipv4 += 1 peer_ip, peer_port = helper.unpackAddress(packed_address) - if site.addPeer(peer_ip, peer_port): + if site.addPeer(peer_ip, peer_port, source="tracker"): added += 1 # Onion found_onion = 0 for packed_address in peers["onion"]: found_onion += 1 peer_onion, peer_port = helper.unpackOnionAddress(packed_address) - if site.addPeer(peer_onion, peer_port): + if site.addPeer(peer_onion, peer_port, source="tracker"): added += 1 if added: site.worker_manager.onPeers() site.updateWebsocket(peers_added=added) - site.log.debug("Found %s ip4, %s onion peers, new: %s" % (found_ip4, found_onion, added)) + return added -@PluginManager.registerTo("Site") -class 
SitePlugin(object): - def announceTracker(self, tracker_protocol, tracker_address, fileserver_port=0, add_types=[], my_peer_id="", mode="start"): - if tracker_protocol != "zero": - return super(SitePlugin, self).announceTracker( - tracker_protocol, tracker_address, fileserver_port, add_types, my_peer_id, mode - ) +@PluginManager.registerTo("SiteAnnouncer") +class SiteAnnouncerPlugin(object): + def getTrackerHandler(self, protocol): + if protocol == "zero": + return self.announceTrackerZero + else: + return super(SiteAnnouncerPlugin, self).getTrackerHandler(protocol) + def announceTrackerZero(self, tracker_address, mode="start", num_want=10): + global time_full_announced s = time.time() - need_types = ["ip4"] - if self.connection_server and self.connection_server.tor_manager and self.connection_server.tor_manager.enabled: + need_types = ["ip4"] # ip4 for backward compatibility reasons + need_types += self.site.connection_server.supported_ip_types + if self.site.connection_server.tor_manager.enabled: need_types.append("onion") if mode == "start" or mode == "more": # Single: Announce only this site - sites = [self] + sites = [self.site] full_announce = False else: # Multi: Announce all currently serving site full_announce = True - if time.time() - time_full_announced.get(tracker_address, 0) < 60 * 5: # No reannounce all sites within 5 minute - return True + if time.time() - time_full_announced.get(tracker_address, 0) < 60 * 15: # No reannounce all sites within short time + return None time_full_announced[tracker_address] = time.time() from Site import SiteManager sites = [site for site in SiteManager.site_manager.sites.values() if site.settings["serving"]] # Create request + add_types = self.getOpenedServiceTypes() request = { - "hashes": [], "onions": [], "port": fileserver_port, "need_types": need_types, "need_num": 20, "add": add_types + "hashes": [], "onions": [], "port": self.fileserver_port, "need_types": need_types, "need_num": 20, "add": add_types } for site 
in sites: if "onion" in add_types: - onion = self.connection_server.tor_manager.getOnion(site.address) + onion = self.site.connection_server.tor_manager.getOnion(site.address) request["onions"].append(onion) - request["hashes"].append(hashlib.sha256(site.address).digest()) + request["hashes"].append(site.address_hash) # Tracker can remove sites that we don't announce if full_announce: request["delete"] = True # Sent request to tracker - tracker = connection_pool.get(tracker_address) # Re-use tracker connection if possible - if not tracker: - tracker_ip, tracker_port = tracker_address.split(":") - tracker = Peer(tracker_ip, tracker_port, connection_server=self.connection_server) - connection_pool[tracker_address] = tracker - res = tracker.request("announce", request) + tracker_peer = connection_pool.get(tracker_address) # Re-use tracker connection if possible + if not tracker_peer: + tracker_ip, tracker_port = tracker_address.rsplit(":", 1) + tracker_peer = Peer(str(tracker_ip), int(tracker_port), connection_server=self.site.connection_server) + tracker_peer.is_tracker_connection = True + connection_pool[tracker_address] = tracker_peer + + res = tracker_peer.request("announce", request) if not res or "peers" not in res: - self.log.debug("Announce to %s failed: %s" % (tracker_address, res)) if full_announce: time_full_announced[tracker_address] = 0 - return False + raise AnnounceError("Invalid response: %s" % res) # Add peers from response to site site_index = 0 + peers_added = 0 for site_res in res["peers"]: site = sites[site_index] - processPeerRes(site, site_res) + peers_added += processPeerRes(tracker_address, site, site_res) site_index += 1 # Check if we need to sign prove the onion addresses if "onion_sign_this" in res: - self.log.debug("Signing %s for %s to add %s onions" % (res["onion_sign_this"], tracker_address, len(sites))) + self.site.log.debug("Signing %s for %s to add %s onions" % (res["onion_sign_this"], tracker_address, len(sites))) 
request["onion_signs"] = {} request["onion_sign_this"] = res["onion_sign_this"] request["need_num"] = 0 for site in sites: - onion = self.connection_server.tor_manager.getOnion(site.address) - sign = CryptRsa.sign(res["onion_sign_this"], self.connection_server.tor_manager.getPrivatekey(onion)) - request["onion_signs"][self.connection_server.tor_manager.getPublickey(onion)] = sign - res = tracker.request("announce", request) + onion = self.site.connection_server.tor_manager.getOnion(site.address) + publickey = self.site.connection_server.tor_manager.getPublickey(onion) + if publickey not in request["onion_signs"]: + sign = CryptRsa.sign(res["onion_sign_this"], self.site.connection_server.tor_manager.getPrivatekey(onion)) + request["onion_signs"][publickey] = sign + res = tracker_peer.request("announce", request) if not res or "onion_sign_this" in res: - self.log.debug("Announce onion address to %s failed: %s" % (tracker_address, res)) if full_announce: time_full_announced[tracker_address] = 0 - return False + raise AnnounceError("Announce onion address to failed: %s" % res) if full_announce: - tracker.remove() # Close connection, we don't need it in next 5 minute + tracker_peer.remove() # Close connection, we don't need it in next 5 minute + + self.site.log.debug( + "Tracker announce result: zero://%s (sites: %s, new peers: %s) in %.3fs" % + (tracker_address, site_index, peers_added, time.time() - s) + ) - return time.time() - s + return True diff --git a/plugins/Bigfile/BigfilePiecefield.py b/plugins/Bigfile/BigfilePiecefield.py new file mode 100644 index 000000000..c76902794 --- /dev/null +++ b/plugins/Bigfile/BigfilePiecefield.py @@ -0,0 +1,158 @@ +import array + + +def packPiecefield(data): + res = [] + if not data: + return array.array("H", "") + + if data[0] == "0": + res.append(0) + find = "1" + else: + find = "0" + last_pos = 0 + pos = 0 + while 1: + pos = data.find(find, pos) + if find == "0": + find = "1" + else: + find = "0" + if pos == -1: + 
            # The final run of `find` characters extends to the end of the data
            res.append(len(data) - last_pos)
            break
        res.append(pos - last_pos)
        last_pos = pos
    return array.array("H", res)


def unpackPiecefield(data):
    # Inverse of packPiecefield: expand the run lengths back to a "10..." string.
    if not data:
        return ""

    res = []
    char = "1"
    for times in data:
        # NOTE(review): any single run longer than 10000 makes the whole field
        # unpack as empty -- presumably a sanity/DoS guard; confirm with callers.
        if times > 10000:
            return ""
        res.append(char * times)
        if char == "1":
            char = "0"
        else:
            char = "1"
    return "".join(res)


class BigfilePiecefield(object):
    """Piece-availability map kept in memory as an uncompressed "10..." string."""
    __slots__ = ["data"]

    def __init__(self):
        self.data = ""

    def fromstring(self, s):
        self.data = s

    def tostring(self):
        return self.data

    def pack(self):
        # Compact run-length encoded byte form (for persistence/transfer)
        return packPiecefield(self.data).tostring()

    def unpack(self, s):
        self.data = unpackPiecefield(array.array("H", s))

    def __getitem__(self, key):
        # Pieces past the end of the map read as "not downloaded"
        try:
            return int(self.data[key])
        except IndexError:
            return False

    def __setitem__(self, key, value):
        data = self.data
        # Grow the map with "0" (missing) up to the written index if needed
        if len(data) < key:
            data = data.ljust(key+1, "0")
        data = data[:key] + str(int(value)) + data[key + 1:]
        self.data = data


class BigfilePiecefieldPacked(object):
    """Same interface as BigfilePiecefield, but kept packed in memory:
    smaller footprint, item access pays an unpack cost each time."""
    __slots__ = ["data"]

    def __init__(self):
        self.data = ""

    def fromstring(self, data):
        self.data = packPiecefield(data).tostring()

    def tostring(self):
        return unpackPiecefield(array.array("H", self.data))

    def pack(self):
        return array.array("H", self.data).tostring()

    def unpack(self, data):
        self.data = data

    def __getitem__(self, key):
        # Unpacks the whole field per access -- slow path by design
        try:
            return int(self.tostring()[key])
        except IndexError:
            return False

    def __setitem__(self, key, value):
        data = self.tostring()
        if len(data) < key:
            data = data.ljust(key+1, "0")
        data = data[:key] + str(int(value)) + data[key + 1:]
        self.fromstring(data)


if __name__ == "__main__":
    # Ad-hoc memory/speed benchmark comparing the two storage variants
    import os
    import psutil
    import time
    testdata = "1" * 100 + "0" * 900 + "1" * 4000 + "0" * 4999 + "1"
    meminfo = psutil.Process(os.getpid()).memory_info

    for storage in [BigfilePiecefieldPacked, BigfilePiecefield]:
        print "-- Testing storage: %s --" % storage
        m = meminfo()[0]
        s =
time.time() + piecefields = {} + for i in range(10000): + piecefield = storage() + piecefield.fromstring(testdata[:i] + "0" + testdata[i + 1:]) + piecefields[i] = piecefield + + print "Create x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)) + + m = meminfo()[0] + s = time.time() + for piecefield in piecefields.values(): + val = piecefield[1000] + + print "Query one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s) + + m = meminfo()[0] + s = time.time() + for piecefield in piecefields.values(): + piecefield[1000] = True + + print "Change one x10000: +%sKB in %.3fs" % ((meminfo()[0] - m) / 1024, time.time() - s) + + m = meminfo()[0] + s = time.time() + for piecefield in piecefields.values(): + packed = piecefield.pack() + + print "Pack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(packed)) + + m = meminfo()[0] + s = time.time() + for piecefield in piecefields.values(): + piecefield.unpack(packed) + + print "Unpack x10000: +%sKB in %.3fs (len: %s)" % ((meminfo()[0] - m) / 1024, time.time() - s, len(piecefields[0].data)) + + piecefields = {} diff --git a/plugins/Bigfile/BigfilePlugin.py b/plugins/Bigfile/BigfilePlugin.py new file mode 100644 index 000000000..d9b4ff1dc --- /dev/null +++ b/plugins/Bigfile/BigfilePlugin.py @@ -0,0 +1,769 @@ +import time +import os +import subprocess +import shutil +import collections +import math +import json + +import msgpack +import gevent +import gevent.lock + +from Plugin import PluginManager +from Debug import Debug +from Crypt import CryptHash +from lib import merkletools +from util import helper +import util +from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked + + +# We can only import plugin host clases after the plugins are loaded +@PluginManager.afterLoad +def importPluginnedClasses(): + global VerifyError, config + from Content.ContentManager import VerifyError + from Config import config + 
+if "upload_nonces" not in locals(): + upload_nonces = {} + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + def isCorsAllowed(self, path): + if path == "/ZeroNet-Internal/BigfileUpload": + return True + else: + return super(UiRequestPlugin, self).isCorsAllowed(path) + + def actionBigfileUpload(self): + nonce = self.get.get("upload_nonce") + if nonce not in upload_nonces: + return self.error403("Upload nonce error.") + + upload_info = upload_nonces[nonce] + del upload_nonces[nonce] + + self.sendHeader(200, "text/html", noscript=True, extra_headers={ + "Access-Control-Allow-Origin": "null", + "Access-Control-Allow-Credentials": "true" + }) + + self.readMultipartHeaders(self.env['wsgi.input']) # Skip http headers + + site = upload_info["site"] + inner_path = upload_info["inner_path"] + + with site.storage.open(inner_path, "wb", create_dirs=True) as out_file: + merkle_root, piece_size, piecemap_info = site.content_manager.hashBigfile( + self.env['wsgi.input'], upload_info["size"], upload_info["piece_size"], out_file + ) + + if len(piecemap_info["sha512_pieces"]) == 1: # Small file, don't split + hash = piecemap_info["sha512_pieces"][0].encode("hex") + hash_id = site.content_manager.hashfield.getHashId(hash) + site.content_manager.optionalDownloaded(inner_path, hash_id, upload_info["size"], own=True) + + else: # Big file + file_name = helper.getFilename(inner_path) + msgpack.pack({file_name: piecemap_info}, site.storage.open(upload_info["piecemap"], "wb")) + + # Find piecemap and file relative path to content.json + file_info = site.content_manager.getFileInfo(inner_path, new_file=True) + content_inner_path_dir = helper.getDirname(file_info["content_inner_path"]) + piecemap_relative_path = upload_info["piecemap"][len(content_inner_path_dir):] + file_relative_path = inner_path[len(content_inner_path_dir):] + + # Add file to content.json + if site.storage.isFile(file_info["content_inner_path"]): + content = 
site.storage.loadJson(file_info["content_inner_path"]) + else: + content = {} + if "files_optional" not in content: + content["files_optional"] = {} + + content["files_optional"][file_relative_path] = { + "sha512": merkle_root, + "size": upload_info["size"], + "piecemap": piecemap_relative_path, + "piece_size": piece_size + } + + merkle_root_hash_id = site.content_manager.hashfield.getHashId(merkle_root) + site.content_manager.optionalDownloaded(inner_path, merkle_root_hash_id, upload_info["size"], own=True) + site.storage.writeJson(file_info["content_inner_path"], content) + + site.content_manager.contents.loadItem(file_info["content_inner_path"]) # reload cache + + return json.dumps({ + "merkle_root": merkle_root, + "piece_num": len(piecemap_info["sha512_pieces"]), + "piece_size": piece_size, + "inner_path": inner_path + }) + + def readMultipartHeaders(self, wsgi_input): + for i in range(100): + line = wsgi_input.readline() + if line == "\r\n": + break + return i + + def actionFile(self, file_path, *args, **kwargs): + if kwargs.get("file_size", 0) > 1024 * 1024 and kwargs.get("path_parts"): # Only check files larger than 1MB + path_parts = kwargs["path_parts"] + site = self.server.site_manager.get(path_parts["address"]) + big_file = site.storage.openBigfile(path_parts["inner_path"], prebuffer=2 * 1024 * 1024) + if big_file: + kwargs["file_obj"] = big_file + kwargs["file_size"] = big_file.size + + return super(UiRequestPlugin, self).actionFile(file_path, *args, **kwargs) + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + def actionBigfileUploadInit(self, to, inner_path, size): + valid_signers = self.site.content_manager.getValidSigners(inner_path) + auth_address = self.user.getAuthAddress(self.site.address) + if not self.site.settings["own"] and auth_address not in valid_signers: + self.log.error("FileWrite forbidden %s not in valid_signers %s" % (auth_address, valid_signers)) + return self.response(to, {"error": "Forbidden, you can 
only modify your own files"}) + + nonce = CryptHash.random() + piece_size = 1024 * 1024 + inner_path = self.site.content_manager.sanitizePath(inner_path) + file_info = self.site.content_manager.getFileInfo(inner_path, new_file=True) + + content_inner_path_dir = helper.getDirname(file_info["content_inner_path"]) + file_relative_path = inner_path[len(content_inner_path_dir):] + + upload_nonces[nonce] = { + "added": time.time(), + "site": self.site, + "inner_path": inner_path, + "websocket_client": self, + "size": size, + "piece_size": piece_size, + "piecemap": inner_path + ".piecemap.msgpack" + } + return { + "url": "/ZeroNet-Internal/BigfileUpload?upload_nonce=" + nonce, + "piece_size": piece_size, + "inner_path": inner_path, + "file_relative_path": file_relative_path + } + + def actionSiteSetAutodownloadBigfileLimit(self, to, limit): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + + self.site.settings["autodownload_bigfile_size_limit"] = int(limit) + self.response(to, "ok") + + def actionFileDelete(self, to, inner_path): + piecemap_inner_path = inner_path + ".piecemap.msgpack" + if self.hasFilePermission(inner_path) and self.site.storage.isFile(piecemap_inner_path): + # Also delete .piecemap.msgpack file if exists + self.log.debug("Deleting piecemap: %s" % piecemap_inner_path) + file_info = self.site.content_manager.getFileInfo(piecemap_inner_path) + if file_info: + content_json = self.site.storage.loadJson(file_info["content_inner_path"]) + relative_path = file_info["relative_path"] + if relative_path in content_json.get("files_optional", {}): + del content_json["files_optional"][relative_path] + self.site.storage.writeJson(file_info["content_inner_path"], content_json) + self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True) + try: + self.site.storage.delete(piecemap_inner_path) + except Exception, err: + 
self.log.error("File %s delete error: %s" % (piecemap_inner_path, err)) + + return super(UiWebsocketPlugin, self).actionFileDelete(to, inner_path) + + +@PluginManager.registerTo("ContentManager") +class ContentManagerPlugin(object): + def getFileInfo(self, inner_path, *args, **kwargs): + if "|" not in inner_path: + return super(ContentManagerPlugin, self).getFileInfo(inner_path, *args, **kwargs) + + inner_path, file_range = inner_path.split("|") + pos_from, pos_to = map(int, file_range.split("-")) + file_info = super(ContentManagerPlugin, self).getFileInfo(inner_path, *args, **kwargs) + return file_info + + def readFile(self, file_in, size, buff_size=1024 * 64): + part_num = 0 + recv_left = size + + while 1: + part_num += 1 + read_size = min(buff_size, recv_left) + part = file_in.read(read_size) + + if not part: + break + yield part + + if part_num % 100 == 0: # Avoid blocking ZeroNet execution during upload + time.sleep(0.001) + + recv_left -= read_size + if recv_left <= 0: + break + + def hashBigfile(self, file_in, size, piece_size=1024 * 1024, file_out=None): + self.site.settings["has_bigfile"] = True + + recv = 0 + try: + piece_hash = CryptHash.sha512t() + piece_hashes = [] + piece_recv = 0 + + mt = merkletools.MerkleTools() + mt.hash_function = CryptHash.sha512t + + part = "" + for part in self.readFile(file_in, size): + if file_out: + file_out.write(part) + + recv += len(part) + piece_recv += len(part) + piece_hash.update(part) + if piece_recv >= piece_size: + piece_digest = piece_hash.digest() + piece_hashes.append(piece_digest) + mt.leaves.append(piece_digest) + piece_hash = CryptHash.sha512t() + piece_recv = 0 + + if len(piece_hashes) % 100 == 0 or recv == size: + self.log.info("- [HASHING:%.0f%%] Pieces: %s, %.1fMB/%.1fMB" % ( + float(recv) / size * 100, len(piece_hashes), recv / 1024 / 1024, size / 1024 / 1024 + )) + part = "" + if len(part) > 0: + piece_digest = piece_hash.digest() + piece_hashes.append(piece_digest) + mt.leaves.append(piece_digest) + 
except Exception as err: + raise err + finally: + if file_out: + file_out.close() + + mt.make_tree() + return mt.get_merkle_root(), piece_size, { + "sha512_pieces": piece_hashes + } + + def hashFile(self, dir_inner_path, file_relative_path, optional=False): + inner_path = dir_inner_path + file_relative_path + + file_size = self.site.storage.getSize(inner_path) + # Only care about optional files >1MB + if not optional or file_size < 1 * 1024 * 1024: + return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional) + + back = {} + content = self.contents.get(dir_inner_path + "content.json") + + hash = None + piecemap_relative_path = None + piece_size = None + + # Don't re-hash if it's already in content.json + if content and file_relative_path in content.get("files_optional", {}): + file_node = content["files_optional"][file_relative_path] + if file_node["size"] == file_size: + self.log.info("- [SAME SIZE] %s" % file_relative_path) + hash = file_node.get("sha512") + piecemap_relative_path = file_node.get("piecemap") + piece_size = file_node.get("piece_size") + + if not hash or not piecemap_relative_path: # Not in content.json yet + if file_size < 5 * 1024 * 1024: # Don't create piecemap automatically for files smaller than 5MB + return super(ContentManagerPlugin, self).hashFile(dir_inner_path, file_relative_path, optional) + + self.log.info("- [HASHING] %s" % file_relative_path) + merkle_root, piece_size, piecemap_info = self.hashBigfile(self.site.storage.open(inner_path, "rb"), file_size) + if not hash: + hash = merkle_root + + if not piecemap_relative_path: + file_name = helper.getFilename(file_relative_path) + piecemap_relative_path = file_relative_path + ".piecemap.msgpack" + piecemap_inner_path = inner_path + ".piecemap.msgpack" + + msgpack.pack({file_name: piecemap_info}, self.site.storage.open(piecemap_inner_path, "wb")) + + back.update(super(ContentManagerPlugin, self).hashFile(dir_inner_path, piecemap_relative_path, 
optional=True)) + + piece_num = int(math.ceil(float(file_size) / piece_size)) + + # Add the merkle root to hashfield + hash_id = self.site.content_manager.hashfield.getHashId(hash) + self.optionalDownloaded(inner_path, hash_id, file_size, own=True) + self.site.storage.piecefields[hash].fromstring("1" * piece_num) + + back[file_relative_path] = {"sha512": hash, "size": file_size, "piecemap": piecemap_relative_path, "piece_size": piece_size} + return back + + def getPiecemap(self, inner_path): + file_info = self.site.content_manager.getFileInfo(inner_path) + piecemap_inner_path = helper.getDirname(file_info["content_inner_path"]) + file_info["piecemap"] + self.site.needFile(piecemap_inner_path, priority=20) + piecemap = msgpack.unpack(self.site.storage.open(piecemap_inner_path))[helper.getFilename(inner_path)] + piecemap["piece_size"] = file_info["piece_size"] + return piecemap + + def verifyPiece(self, inner_path, pos, piece): + piecemap = self.getPiecemap(inner_path) + piece_i = pos / piecemap["piece_size"] + if CryptHash.sha512sum(piece, format="digest") != piecemap["sha512_pieces"][piece_i]: + raise VerifyError("Invalid hash") + return True + + def verifyFile(self, inner_path, file, ignore_same=True): + if "|" not in inner_path: + return super(ContentManagerPlugin, self).verifyFile(inner_path, file, ignore_same) + + inner_path, file_range = inner_path.split("|") + pos_from, pos_to = map(int, file_range.split("-")) + + return self.verifyPiece(inner_path, pos_from, file) + + def optionalDownloaded(self, inner_path, hash_id, size=None, own=False): + if "|" in inner_path: + inner_path, file_range = inner_path.split("|") + pos_from, pos_to = map(int, file_range.split("-")) + file_info = self.getFileInfo(inner_path) + + # Mark piece downloaded + piece_i = pos_from / file_info["piece_size"] + self.site.storage.piecefields[file_info["sha512"]][piece_i] = True + + # Only add to site size on first request + if hash_id in self.hashfield: + size = 0 + elif size > 1024 * 
1024: + file_info = self.getFileInfo(inner_path) + if file_info and "sha512" in file_info: # We already have the file, but not in piecefield + sha512 = file_info["sha512"] + if sha512 not in self.site.storage.piecefields: + self.site.storage.checkBigfile(inner_path) + + return super(ContentManagerPlugin, self).optionalDownloaded(inner_path, hash_id, size, own) + + def optionalRemoved(self, inner_path, hash_id, size=None): + if size and size > 1024 * 1024: + file_info = self.getFileInfo(inner_path) + sha512 = file_info["sha512"] + if sha512 in self.site.storage.piecefields: + del self.site.storage.piecefields[sha512] + + # Also remove other pieces of the file from download queue + for key in self.site.bad_files.keys(): + if key.startswith(inner_path + "|"): + del self.site.bad_files[key] + self.site.worker_manager.removeSolvedFileTasks() + return super(ContentManagerPlugin, self).optionalRemoved(inner_path, hash_id, size) + + +@PluginManager.registerTo("SiteStorage") +class SiteStoragePlugin(object): + def __init__(self, *args, **kwargs): + super(SiteStoragePlugin, self).__init__(*args, **kwargs) + self.piecefields = collections.defaultdict(BigfilePiecefield) + if "piecefields" in self.site.settings.get("cache", {}): + for sha512, piecefield_packed in self.site.settings["cache"].get("piecefields").iteritems(): + if piecefield_packed: + self.piecefields[sha512].unpack(piecefield_packed.decode("base64")) + self.site.settings["cache"]["piecefields"] = {} + + def createSparseFile(self, inner_path, size, sha512=None): + file_path = self.getPath(inner_path) + + file_dir = os.path.dirname(file_path) + if not os.path.isdir(file_dir): + os.makedirs(file_dir) + + f = open(file_path, 'wb') + f.truncate(min(1024 * 1024 * 5, size)) # Only pre-allocate up to 5MB + f.close() + if os.name == "nt": + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + subprocess.call(["fsutil", "sparse", "setflag", file_path], close_fds=True, 
startupinfo=startupinfo) + + if sha512 and sha512 in self.piecefields: + self.log.debug("%s: File not exists, but has piecefield. Deleting piecefield." % inner_path) + del self.piecefields[sha512] + + def write(self, inner_path, content): + if "|" not in inner_path: + return super(SiteStoragePlugin, self).write(inner_path, content) + + # Write to specific position by passing |{pos} after the filename + inner_path, file_range = inner_path.split("|") + pos_from, pos_to = map(int, file_range.split("-")) + file_path = self.getPath(inner_path) + + # Create dir if not exist + file_dir = os.path.dirname(file_path) + if not os.path.isdir(file_dir): + os.makedirs(file_dir) + + if not os.path.isfile(file_path): + file_info = self.site.content_manager.getFileInfo(inner_path) + self.createSparseFile(inner_path, file_info["size"]) + + # Write file + with open(file_path, "rb+") as file: + file.seek(pos_from) + if hasattr(content, 'read'): # File-like object + shutil.copyfileobj(content, file) # Write buff to disk + else: # Simple string + file.write(content) + del content + self.onUpdated(inner_path) + + def checkBigfile(self, inner_path): + file_info = self.site.content_manager.getFileInfo(inner_path) + if not file_info or (file_info and "piecemap" not in file_info): # It's not a big file + return False + + self.site.settings["has_bigfile"] = True + file_path = self.getPath(inner_path) + sha512 = file_info["sha512"] + piece_num = int(math.ceil(float(file_info["size"]) / file_info["piece_size"])) + if os.path.isfile(file_path): + if sha512 not in self.piecefields: + if open(file_path).read(128) == "\0" * 128: + piece_data = "0" + else: + piece_data = "1" + self.log.debug("%s: File exists, but not in piecefield. Filling piecefiled with %s * %s." 
% (inner_path, piece_num, piece_data)) + self.piecefields[sha512].fromstring(piece_data * piece_num) + else: + self.log.debug("Creating bigfile: %s" % inner_path) + self.createSparseFile(inner_path, file_info["size"], sha512) + self.piecefields[sha512].fromstring("0" * piece_num) + return True + + def openBigfile(self, inner_path, prebuffer=0): + if not self.checkBigfile(inner_path): + return False + self.site.needFile(inner_path, blocking=False) # Download piecemap + return BigFile(self.site, inner_path, prebuffer=prebuffer) + + +class BigFile(object): + def __init__(self, site, inner_path, prebuffer=0): + self.site = site + self.inner_path = inner_path + file_path = site.storage.getPath(inner_path) + file_info = self.site.content_manager.getFileInfo(inner_path) + self.piece_size = file_info["piece_size"] + self.sha512 = file_info["sha512"] + self.size = file_info["size"] + self.prebuffer = prebuffer + self.read_bytes = 0 + + self.piecefield = self.site.storage.piecefields[self.sha512] + self.f = open(file_path, "rb+") + self.read_lock = gevent.lock.Semaphore() + + def read(self, buff=64 * 1024): + with self.read_lock: + pos = self.f.tell() + read_until = min(self.size, pos + buff) + requests = [] + # Request all required blocks + while 1: + piece_i = pos / self.piece_size + if piece_i * self.piece_size >= read_until: + break + pos_from = piece_i * self.piece_size + pos_to = pos_from + self.piece_size + if not self.piecefield[piece_i]: + requests.append(self.site.needFile("%s|%s-%s" % (self.inner_path, pos_from, pos_to), blocking=False, update=True, priority=10)) + pos += self.piece_size + + if not all(requests): + return None + + # Request prebuffer + if self.prebuffer: + prebuffer_until = min(self.size, read_until + self.prebuffer) + priority = 3 + while 1: + piece_i = pos / self.piece_size + if piece_i * self.piece_size >= prebuffer_until: + break + pos_from = piece_i * self.piece_size + pos_to = pos_from + self.piece_size + if not self.piecefield[piece_i]: + 
self.site.needFile("%s|%s-%s" % (self.inner_path, pos_from, pos_to), blocking=False, update=True, priority=max(0, priority)) + priority -= 1 + pos += self.piece_size + + gevent.joinall(requests) + self.read_bytes += buff + + # Increase buffer for long reads + if self.read_bytes > 7 * 1024 * 1024 and self.prebuffer < 5 * 1024 * 1024: + self.site.log.debug("%s: Increasing bigfile buffer size to 5MB..." % self.inner_path) + self.prebuffer = 5 * 1024 * 1024 + + return self.f.read(buff) + + def seek(self, pos, whence=0): + with self.read_lock: + if whence == 2: # Relative from file end + pos = self.size + pos # Use the real size instead of size on the disk + whence = 0 + return self.f.seek(pos, whence) + + def tell(self): + return self.f.tell() + + def close(self): + self.f.close() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + + +@PluginManager.registerTo("WorkerManager") +class WorkerManagerPlugin(object): + def addTask(self, inner_path, *args, **kwargs): + file_info = kwargs.get("file_info") + if file_info and "piecemap" in file_info: # Bigfile + self.site.settings["has_bigfile"] = True + + piecemap_inner_path = helper.getDirname(file_info["content_inner_path"]) + file_info["piecemap"] + piecemap_task = None + if not self.site.storage.isFile(piecemap_inner_path): + # Start download piecemap + piecemap_task = super(WorkerManagerPlugin, self).addTask(piecemap_inner_path, priority=30) + autodownload_bigfile_size_limit = self.site.settings.get("autodownload_bigfile_size_limit", config.autodownload_bigfile_size_limit) + if "|" not in inner_path and self.site.isDownloadable(inner_path) and file_info["size"] / 1024 / 1024 <= autodownload_bigfile_size_limit: + gevent.spawn_later(0.1, self.site.needFile, inner_path + "|all") # Download all pieces + + if "|" in inner_path: + # Start download piece + task = super(WorkerManagerPlugin, self).addTask(inner_path, *args, **kwargs) + + inner_path, file_range = 
inner_path.split("|") + pos_from, pos_to = map(int, file_range.split("-")) + task["piece_i"] = pos_from / file_info["piece_size"] + task["sha512"] = file_info["sha512"] + else: + if inner_path in self.site.bad_files: + del self.site.bad_files[inner_path] + if piecemap_task: + task = piecemap_task + else: + fake_evt = gevent.event.AsyncResult() # Don't download anything if no range specified + fake_evt.set(True) + task = {"evt": fake_evt} + + if not self.site.storage.isFile(inner_path): + self.site.storage.createSparseFile(inner_path, file_info["size"], file_info["sha512"]) + piece_num = int(math.ceil(float(file_info["size"]) / file_info["piece_size"])) + self.site.storage.piecefields[file_info["sha512"]].fromstring("0" * piece_num) + else: + task = super(WorkerManagerPlugin, self).addTask(inner_path, *args, **kwargs) + return task + + def taskAddPeer(self, task, peer): + if "piece_i" in task: + if not peer.piecefields[task["sha512"]][task["piece_i"]]: + if task["sha512"] not in peer.piecefields: + gevent.spawn(peer.updatePiecefields, force=True) + elif not task["peers"]: + gevent.spawn(peer.updatePiecefields) + + return False # Deny to add peers to task if file not in piecefield + return super(WorkerManagerPlugin, self).taskAddPeer(task, peer) + + +@PluginManager.registerTo("FileRequest") +class FileRequestPlugin(object): + def isReadable(self, site, inner_path, file, pos): + # Peek into file + if file.read(10) == "\0" * 10: + # Looks empty, but makes sures we don't have that piece + file_info = site.content_manager.getFileInfo(inner_path) + if "piece_size" in file_info: + piece_i = pos / file_info["piece_size"] + if not site.storage.piecefields[file_info["sha512"]][piece_i]: + return False + # Seek back to position we want to read + file.seek(pos) + return super(FileRequestPlugin, self).isReadable(site, inner_path, file, pos) + + def actionGetPiecefields(self, params): + site = self.sites.get(params["site"]) + if not site or not site.settings["serving"]: # Site 
unknown or not serving + self.response({"error": "Unknown site"}) + return False + + # Add peer to site if not added before + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) + if not peer.connection: # Just added + peer.connect(self.connection) # Assign current connection to peer + + piecefields_packed = {sha512: piecefield.pack() for sha512, piecefield in site.storage.piecefields.iteritems()} + self.response({"piecefields_packed": piecefields_packed}) + + def actionSetPiecefields(self, params): + site = self.sites.get(params["site"]) + if not site or not site.settings["serving"]: # Site unknown or not serving + self.response({"error": "Unknown site"}) + self.connection.badAction(5) + return False + + # Add or get peer + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, connection=self.connection) + if not peer.connection: + peer.connect(self.connection) + + peer.piecefields = collections.defaultdict(BigfilePiecefieldPacked) + for sha512, piecefield_packed in params["piecefields_packed"].iteritems(): + peer.piecefields[sha512].unpack(piecefield_packed) + site.settings["has_bigfile"] = True + + self.response({"ok": "Updated"}) + + +@PluginManager.registerTo("Peer") +class PeerPlugin(object): + def __getattr__(self, key): + if key == "piecefields": + self.piecefields = collections.defaultdict(BigfilePiecefieldPacked) + return self.piecefields + elif key == "time_piecefields_updated": + self.time_piecefields_updated = None + return self.time_piecefields_updated + else: + return super(PeerPlugin, self).__getattr__(key) + + @util.Noparallel(ignore_args=True) + def updatePiecefields(self, force=False): + if self.connection and self.connection.handshake.get("rev", 0) < 2190: + return False # Not supported + + # Don't update piecefield again in 1 min + if self.time_piecefields_updated and time.time() - self.time_piecefields_updated < 60 and not force: + return False + + self.time_piecefields_updated = 
time.time() + res = self.request("getPiecefields", {"site": self.site.address}) + if not res or "error" in res: + return False + + self.piecefields = collections.defaultdict(BigfilePiecefieldPacked) + try: + for sha512, piecefield_packed in res["piecefields_packed"].iteritems(): + self.piecefields[sha512].unpack(piecefield_packed) + except Exception as err: + self.log("Invalid updatePiecefields response: %s" % Debug.formatException(err)) + + return self.piecefields + + def sendMyHashfield(self, *args, **kwargs): + return super(PeerPlugin, self).sendMyHashfield(*args, **kwargs) + + def updateHashfield(self, *args, **kwargs): + if self.site.settings.get("has_bigfile"): + thread = gevent.spawn(self.updatePiecefields, *args, **kwargs) + back = super(PeerPlugin, self).updateHashfield(*args, **kwargs) + thread.join() + return back + else: + return super(PeerPlugin, self).updateHashfield(*args, **kwargs) + + def getFile(self, site, inner_path, *args, **kwargs): + if "|" in inner_path: + inner_path, file_range = inner_path.split("|") + pos_from, pos_to = map(int, file_range.split("-")) + kwargs["pos_from"] = pos_from + kwargs["pos_to"] = pos_to + return super(PeerPlugin, self).getFile(site, inner_path, *args, **kwargs) + + +@PluginManager.registerTo("Site") +class SitePlugin(object): + def isFileDownloadAllowed(self, inner_path, file_info): + if "piecemap" in file_info: + file_size_mb = file_info["size"] / 1024 / 1024 + if config.bigfile_size_limit and file_size_mb > config.bigfile_size_limit: + self.log.debug( + "Bigfile size %s too large: %sMB > %sMB, skipping..." 
% + (inner_path, file_size_mb, config.bigfile_size_limit) + ) + return False + + file_info = file_info.copy() + file_info["size"] = file_info["piece_size"] + return super(SitePlugin, self).isFileDownloadAllowed(inner_path, file_info) + + def getSettingsCache(self): + back = super(SitePlugin, self).getSettingsCache() + if self.storage.piecefields: + back["piecefields"] = {sha512: piecefield.pack().encode("base64") for sha512, piecefield in self.storage.piecefields.iteritems()} + return back + + def needFile(self, inner_path, *args, **kwargs): + if inner_path.endswith("|all"): + @util.Pooled(20) + def pooledNeedBigfile(inner_path, *args, **kwargs): + if inner_path not in self.bad_files: + self.log.debug("Cancelled piece, skipping %s" % inner_path) + return False + return self.needFile(inner_path, *args, **kwargs) + + inner_path = inner_path.replace("|all", "") + file_info = self.needFileInfo(inner_path) + file_size = file_info["size"] + piece_size = file_info["piece_size"] + + piece_num = int(math.ceil(float(file_size) / piece_size)) + + file_threads = [] + + piecefield = self.storage.piecefields.get(file_info["sha512"]) + + for piece_i in range(piece_num): + piece_from = piece_i * piece_size + piece_to = min(file_size, piece_from + piece_size) + if not piecefield or not piecefield[piece_i]: + inner_path_piece = "%s|%s-%s" % (inner_path, piece_from, piece_to) + self.bad_files[inner_path_piece] = self.bad_files.get(inner_path_piece, 1) + res = pooledNeedBigfile(inner_path_piece, blocking=False) + if res is not True and res is not False: + file_threads.append(res) + gevent.joinall(file_threads) + else: + return super(SitePlugin, self).needFile(inner_path, *args, **kwargs) + + +@PluginManager.registerTo("ConfigPlugin") +class ConfigPlugin(object): + def createArguments(self): + group = self.parser.add_argument_group("Bigfile plugin") + group.add_argument('--autodownload_bigfile_size_limit', help='Also download bigfiles smaller than this limit if help distribute option 
is checked', default=1, metavar="MB", type=int) + group.add_argument('--bigfile_size_limit', help='Maximum size of downloaded big files', default=False, metavar="MB", type=int) + + return super(ConfigPlugin, self).createArguments() diff --git a/plugins/Bigfile/Test/TestBigfile.py b/plugins/Bigfile/Test/TestBigfile.py new file mode 100644 index 000000000..de1266824 --- /dev/null +++ b/plugins/Bigfile/Test/TestBigfile.py @@ -0,0 +1,522 @@ +import time +from cStringIO import StringIO + +import pytest +import msgpack +import mock + +from Connection import ConnectionServer +from Content.ContentManager import VerifyError +from File import FileServer +from File import FileRequest +from Worker import WorkerManager +from Peer import Peer +from Bigfile import BigfilePiecefield, BigfilePiecefieldPacked +from Test import Spy + + +@pytest.mark.usefixtures("resetSettings") +@pytest.mark.usefixtures("resetTempSettings") +class TestBigfile: + privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" + + def createBigfile(self, site, inner_path="data/optional.any.iso", pieces=10): + f = site.storage.open(inner_path, "w") + for i in range(pieces * 100): + f.write(("Test%s" % i).ljust(10, "-") * 1000) + f.close() + assert site.content_manager.sign("content.json", self.privatekey) + return inner_path + + def testPiecemapCreate(self, site): + inner_path = self.createBigfile(site) + content = site.storage.loadJson("content.json") + assert "data/optional.any.iso" in content["files_optional"] + file_node = content["files_optional"][inner_path] + assert file_node["size"] == 10 * 1000 * 1000 + assert file_node["sha512"] == "47a72cde3be80b4a829e7674f72b7c6878cf6a70b0c58c6aa6c17d7e9948daf6" + assert file_node["piecemap"] == inner_path + ".piecemap.msgpack" + + piecemap = msgpack.unpack(site.storage.open(file_node["piecemap"], "rb"))["optional.any.iso"] + assert len(piecemap["sha512_pieces"]) == 10 + assert piecemap["sha512_pieces"][0] != piecemap["sha512_pieces"][1] + assert 
piecemap["sha512_pieces"][0].encode("hex") == "a73abad9992b3d0b672d0c2a292046695d31bebdcb1e150c8410bbe7c972eff3" + + def testVerifyPiece(self, site): + inner_path = self.createBigfile(site) + + # Verify all 10 piece + f = site.storage.open(inner_path, "rb") + for i in range(10): + piece = StringIO(f.read(1024 * 1024)) + piece.seek(0) + site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece) + f.close() + + # Try to verify piece 0 with piece 1 hash + with pytest.raises(VerifyError) as err: + i = 1 + f = site.storage.open(inner_path, "rb") + piece = StringIO(f.read(1024 * 1024)) + f.close() + site.content_manager.verifyPiece(inner_path, i * 1024 * 1024, piece) + assert "Invalid hash" in str(err) + + def testSparseFile(self, site): + inner_path = "sparsefile" + + # Create a 100MB sparse file + site.storage.createSparseFile(inner_path, 100 * 1024 * 1024) + + # Write to file beginning + s = time.time() + f = site.storage.write("%s|%s-%s" % (inner_path, 0, 1024 * 1024), "hellostart" * 1024) + time_write_start = time.time() - s + + # Write to file end + s = time.time() + f = site.storage.write("%s|%s-%s" % (inner_path, 99 * 1024 * 1024, 99 * 1024 * 1024 + 1024 * 1024), "helloend" * 1024) + time_write_end = time.time() - s + + # Verify writes + f = site.storage.open(inner_path) + assert f.read(10) == "hellostart" + f.seek(99 * 1024 * 1024) + assert f.read(8) == "helloend" + f.close() + + site.storage.delete(inner_path) + + # Writing to end shold not take much longer, than writing to start + assert time_write_end <= max(0.1, time_write_start * 1.1) + + def testRangedFileRequest(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + file_server.sites[site.address] = site + client = FileServer(file_server.ip, 1545) + client.sites[site_temp.address] = site_temp + site_temp.connection_server = client + connection = client.getConnection(file_server.ip, 1544) + + # Add file_server as peer to client + peer_file_server = 
site_temp.addPeer(file_server.ip, 1544) + + buff = peer_file_server.getFile(site_temp.address, "%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024)) + + assert len(buff.getvalue()) == 1 * 1024 * 1024 # Correct block size + assert buff.getvalue().startswith("Test524") # Correct data + buff.seek(0) + assert site.content_manager.verifyPiece(inner_path, 5 * 1024 * 1024, buff) # Correct hash + + connection.close() + client.stop() + + def testRangedFileDownload(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Make sure the file and the piecemap in the optional hashfield + file_info = site.content_manager.getFileInfo(inner_path) + assert site.content_manager.hashfield.hasHash(file_info["sha512"]) + + piecemap_hash = site.content_manager.getFileInfo(file_info["piecemap"])["sha512"] + assert site.content_manager.hashfield.hasHash(piecemap_hash) + + # Init client server + client = ConnectionServer(file_server.ip, 1545) + site_temp.connection_server = client + peer_client = site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"] + assert not bad_files + + # client_piecefield = peer_client.piecefields[file_info["sha512"]].tostring() + # assert client_piecefield == "1" * 10 + + # Download 5. and 10. block + + site_temp.needFile("%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024)) + site_temp.needFile("%s|%s-%s" % (inner_path, 9 * 1024 * 1024, 10 * 1024 * 1024)) + + # Verify 0. block not downloaded + f = site_temp.storage.open(inner_path) + assert f.read(10) == "\0" * 10 + # Verify 5. and 10. 
block downloaded + f.seek(5 * 1024 * 1024) + assert f.read(7) == "Test524" + f.seek(9 * 1024 * 1024) + assert f.read(7) == "943---T" + + # Verify hashfield + assert set(site_temp.content_manager.hashfield) == set([18343, 30970]) # 18343: data/optional.any.iso, 30970: data/optional.any.iso.hashmap.msgpack + + def testOpenBigfile(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + client = ConnectionServer(file_server.ip, 1545) + site_temp.connection_server = client + site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + # Open virtual file + assert not site_temp.storage.isFile(inner_path) + + with site_temp.storage.openBigfile(inner_path) as f: + with Spy.Spy(FileRequest, "route") as requests: + f.seek(5 * 1024 * 1024) + assert f.read(7) == "Test524" + + f.seek(9 * 1024 * 1024) + assert f.read(7) == "943---T" + + assert len(requests) == 4 # 1x peicemap + 1x getpiecefield + 2x for pieces + + assert set(site_temp.content_manager.hashfield) == set([18343, 30970]) + + assert site_temp.storage.piecefields[f.sha512].tostring() == "0000010001" + assert f.sha512 in site_temp.getSettingsCache()["piecefields"] + + # Test requesting already downloaded + with Spy.Spy(FileRequest, "route") as requests: + f.seek(5 * 1024 * 1024) + assert f.read(7) == "Test524" + + assert len(requests) == 0 + + # Test requesting multi-block overflow reads + with Spy.Spy(FileRequest, "route") as requests: + f.seek(5 * 1024 * 1024) # We already have this block + data = f.read(1024 * 1024 * 3) # Our read overflow to 6. and 7. 
block + assert data.startswith("Test524") + assert data.endswith("Test838-") + assert "\0" not in data # No null bytes allowed + + assert len(requests) == 2 # Two block download + + # Test out of range request + f.seek(5 * 1024 * 1024) + data = f.read(1024 * 1024 * 30) + assert len(data) == 10 * 1000 * 1000 - (5 * 1024 * 1024) + + f.seek(30 * 1024 * 1024) + data = f.read(1024 * 1024 * 30) + assert len(data) == 0 + + @pytest.mark.parametrize("piecefield_obj", [BigfilePiecefield, BigfilePiecefieldPacked]) + def testPiecefield(self, piecefield_obj, site): + testdatas = [ + "1" * 100 + "0" * 900 + "1" * 4000 + "0" * 4999 + "1", + "010101" * 10 + "01" * 90 + "10" * 400 + "0" * 4999, + "1" * 10000, + "0" * 10000 + ] + for testdata in testdatas: + piecefield = piecefield_obj() + + piecefield.fromstring(testdata) + assert piecefield.tostring() == testdata + assert piecefield[0] == int(testdata[0]) + assert piecefield[100] == int(testdata[100]) + assert piecefield[1000] == int(testdata[1000]) + assert piecefield[len(testdata) - 1] == int(testdata[len(testdata) - 1]) + + packed = piecefield.pack() + piecefield_new = piecefield_obj() + piecefield_new.unpack(packed) + assert piecefield.tostring() == piecefield_new.tostring() + assert piecefield_new.tostring() == testdata + + def testFileGet(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + site_temp.connection_server = FileServer(file_server.ip, 1545) + site_temp.connection_server.sites[site_temp.address] = site_temp + site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + # Download second block + with site_temp.storage.openBigfile(inner_path) as f: + f.seek(1024 * 1024) + assert f.read(1024)[0] != "\0" + + # Make sure first block not download + with site_temp.storage.open(inner_path) as f: + assert 
f.read(1024)[0] == "\0" + + peer2 = site.addPeer(file_server.ip, 1545, return_peer=True) + + # Should drop error on first block request + assert not peer2.getFile(site.address, "%s|0-%s" % (inner_path, 1024 * 1024 * 1)) + + # Should not drop error for second block request + assert peer2.getFile(site.address, "%s|%s-%s" % (inner_path, 1024 * 1024 * 1, 1024 * 1024 * 2)) + + def benchmarkPeerMemory(self, site, file_server): + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + import psutil, os + meminfo = psutil.Process(os.getpid()).memory_info + + mem_s = meminfo()[0] + s = time.time() + for i in range(25000): + site.addPeer(file_server.ip, i) + print "%.3fs MEM: + %sKB" % (time.time() - s, (meminfo()[0] - mem_s) / 1024) # 0.082s MEM: + 6800KB + print site.peers.values()[0].piecefields + + def testUpdatePiecefield(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + server1 = file_server + server1.sites[site.address] = site + server2 = FileServer(file_server.ip, 1545) + server2.sites[site_temp.address] = site_temp + site_temp.connection_server = server2 + + # Add file_server as peer to client + server2_peer1 = site_temp.addPeer(file_server.ip, 1544) + + # Testing piecefield sync + assert len(server2_peer1.piecefields) == 0 + assert server2_peer1.updatePiecefields() # Query piecefields from peer + assert len(server2_peer1.piecefields) > 0 + + def testWorkerManagerPiecefieldDeny(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + server1 = file_server + server1.sites[site.address] = site + server2 = FileServer(file_server.ip, 1545) + server2.sites[site_temp.address] = site_temp + site_temp.connection_server = server2 + + # Add file_server as peer to client + server2_peer1 = site_temp.addPeer(file_server.ip, 1544) # Working + + site_temp.downloadContent("content.json", download_files=False) + site_temp.needFile("data/optional.any.iso.piecemap.msgpack") + + # 
Add fake peers with optional files downloaded + for i in range(5): + fake_peer = site_temp.addPeer("127.0.1.%s" % i, 1544) + fake_peer.hashfield = site.content_manager.hashfield + fake_peer.has_hashfield = True + + with Spy.Spy(WorkerManager, "addWorker") as requests: + site_temp.needFile("%s|%s-%s" % (inner_path, 5 * 1024 * 1024, 6 * 1024 * 1024)) + site_temp.needFile("%s|%s-%s" % (inner_path, 6 * 1024 * 1024, 7 * 1024 * 1024)) + + # It should only request parts from peer1 as the other peers does not have the requested parts in piecefields + assert len([request[1] for request in requests if request[1] != server2_peer1]) == 0 + + def testWorkerManagerPiecefieldDownload(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + server1 = file_server + server1.sites[site.address] = site + server2 = FileServer(file_server.ip, 1545) + server2.sites[site_temp.address] = site_temp + site_temp.connection_server = server2 + sha512 = site.content_manager.getFileInfo(inner_path)["sha512"] + + # Create 10 fake peer for each piece + for i in range(10): + peer = Peer(file_server.ip, 1544, site_temp, server2) + peer.piecefields[sha512][i] = "1" + peer.updateHashfield = mock.MagicMock(return_value=False) + peer.updatePiecefields = mock.MagicMock(return_value=False) + peer.findHashIds = mock.MagicMock(return_value={"nope": []}) + peer.hashfield = site.content_manager.hashfield + peer.has_hashfield = True + peer.key = "Peer:%s" % i + site_temp.peers["Peer:%s" % i] = peer + + site_temp.downloadContent("content.json", download_files=False) + site_temp.needFile("data/optional.any.iso.piecemap.msgpack") + + with Spy.Spy(Peer, "getFile") as requests: + for i in range(10): + site_temp.needFile("%s|%s-%s" % (inner_path, i * 1024 * 1024, (i + 1) * 1024 * 1024)) + + assert len(requests) == 10 + for i in range(10): + assert requests[i][0] == site_temp.peers["Peer:%s" % i] # Every part should be requested from piece owner peer + + def testDownloadStats(self, file_server, 
site, site_temp): + inner_path = self.createBigfile(site) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + client = ConnectionServer(file_server.ip, 1545) + site_temp.connection_server = client + site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + # Open virtual file + assert not site_temp.storage.isFile(inner_path) + + # Check size before downloads + assert site_temp.settings["size"] < 10 * 1024 * 1024 + assert site_temp.settings["optional_downloaded"] == 0 + size_piecemap = site_temp.content_manager.getFileInfo(inner_path + ".piecemap.msgpack")["size"] + size_bigfile = site_temp.content_manager.getFileInfo(inner_path)["size"] + + with site_temp.storage.openBigfile(inner_path) as f: + assert "\0" not in f.read(1024) + assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile + + with site_temp.storage.openBigfile(inner_path) as f: + # Don't count twice + assert "\0" not in f.read(1024) + assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile + + # Add second block + assert "\0" not in f.read(1024 * 1024) + assert site_temp.settings["optional_downloaded"] == size_piecemap + size_bigfile + + def testPrebuffer(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + client = ConnectionServer(file_server.ip, 1545) + site_temp.connection_server = client + site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + # Open virtual file + assert not site_temp.storage.isFile(inner_path) + + with site_temp.storage.openBigfile(inner_path, prebuffer=1024 * 1024 * 2) as f: + with Spy.Spy(FileRequest, "route") as requests: + f.seek(5 * 1024 * 1024) + assert f.read(7) == 
"Test524" + # assert len(requests) == 3 # 1x piecemap + 1x getpiecefield + 1x for pieces + assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 2 + + time.sleep(0.5) # Wait prebuffer download + + sha512 = site.content_manager.getFileInfo(inner_path)["sha512"] + assert site_temp.storage.piecefields[sha512].tostring() == "0000011100" + + # No prebuffer beyond end of the file + f.seek(9 * 1024 * 1024) + assert "\0" not in f.read(7) + + assert len([task for task in site_temp.worker_manager.tasks if task["inner_path"].startswith(inner_path)]) == 0 + + def testDownloadAllPieces(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + client = ConnectionServer(file_server.ip, 1545) + site_temp.connection_server = client + site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + # Open virtual file + assert not site_temp.storage.isFile(inner_path) + + with Spy.Spy(FileRequest, "route") as requests: + site_temp.needFile("%s|all" % inner_path) + + assert len(requests) == 12 # piecemap.msgpack, getPiecefields, 10 x piece + + # Don't re-download already got pieces + with Spy.Spy(FileRequest, "route") as requests: + site_temp.needFile("%s|all" % inner_path) + + assert len(requests) == 0 + + def testFileSize(self, file_server, site, site_temp): + inner_path = self.createBigfile(site) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + client = ConnectionServer(file_server.ip, 1545) + site_temp.connection_server = client + site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + # Open virtual file + assert not site_temp.storage.isFile(inner_path) + + # Download first 
block + site_temp.needFile("%s|%s-%s" % (inner_path, 0 * 1024 * 1024, 1 * 1024 * 1024)) + assert site_temp.storage.getSize(inner_path) < 1000 * 1000 * 10 # Size on the disk should be smaller than the real size + + site_temp.needFile("%s|%s-%s" % (inner_path, 9 * 1024 * 1024, 10 * 1024 * 1024)) + assert site_temp.storage.getSize(inner_path) == site.storage.getSize(inner_path) + + @pytest.mark.parametrize("size", [1024 * 3, 1024 * 1024 * 3, 1024 * 1024 * 30]) + def testNullFileRead(self, file_server, site, site_temp, size): + inner_path = "data/optional.iso" + + f = site.storage.open(inner_path, "w") + f.write("\0" * size) + f.close() + assert site.content_manager.sign("content.json", self.privatekey) + + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + site_temp.connection_server = FileServer(file_server.ip, 1545) + site_temp.connection_server.sites[site_temp.address] = site_temp + site_temp.addPeer(file_server.ip, 1544) + + # Download site + site_temp.download(blind_includes=True).join(timeout=5) + + if "piecemap" in site.content_manager.getFileInfo(inner_path): # Bigfile + site_temp.needFile(inner_path + "|all") + else: + site_temp.needFile(inner_path) + + + assert site_temp.storage.getSize(inner_path) == size diff --git a/plugins/Bigfile/Test/conftest.py b/plugins/Bigfile/Test/conftest.py new file mode 100644 index 000000000..634e66e2e --- /dev/null +++ b/plugins/Bigfile/Test/conftest.py @@ -0,0 +1 @@ +from src.Test.conftest import * diff --git a/plugins/Bigfile/Test/pytest.ini b/plugins/Bigfile/Test/pytest.ini new file mode 100644 index 000000000..d09210d1d --- /dev/null +++ b/plugins/Bigfile/Test/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +python_files = Test*.py +addopts = -rsxX -v --durations=6 +markers = + webtest: mark a test as a webtest. 
\ No newline at end of file diff --git a/plugins/Bigfile/__init__.py b/plugins/Bigfile/__init__.py new file mode 100644 index 000000000..005d66614 --- /dev/null +++ b/plugins/Bigfile/__init__.py @@ -0,0 +1,2 @@ +import BigfilePlugin +from BigfilePiecefield import BigfilePiecefield, BigfilePiecefieldPacked \ No newline at end of file diff --git a/plugins/Chart/ChartCollector.py b/plugins/Chart/ChartCollector.py new file mode 100644 index 000000000..471c4b91c --- /dev/null +++ b/plugins/Chart/ChartCollector.py @@ -0,0 +1,182 @@ +import time +import sys +import collections +import itertools +import logging + +import gevent +from util import helper +from Config import config + + +class ChartCollector(object): + def __init__(self, db): + self.db = db + if config.action == "main": + gevent.spawn_later(60 * 3, self.collector) + self.log = logging.getLogger("ChartCollector") + self.last_values = collections.defaultdict(dict) + + def setInitialLastValues(self, sites): + # Recover last value of site bytes/sent + for site in sites: + self.last_values["site:" + site.address]["site_bytes_recv"] = site.settings.get("bytes_recv", 0) + self.last_values["site:" + site.address]["site_bytes_sent"] = site.settings.get("bytes_sent", 0) + + def getCollectors(self): + collectors = {} + file_server = sys.modules["main"].file_server + sites = file_server.sites + if not sites: + return collectors + content_db = sites.values()[0].content_manager.contents.db + + # Connection stats + collectors["connection"] = lambda: len(file_server.connections) + collectors["connection_in"] = ( + lambda: len([1 for connection in file_server.connections if connection.type == "in"]) + ) + collectors["connection_onion"] = ( + lambda: len([1 for connection in file_server.connections if connection.ip.endswith(".onion")]) + ) + collectors["connection_ping_avg"] = ( + lambda: round(1000 * helper.avg( + [connection.last_ping_delay for connection in file_server.connections if connection.last_ping_delay] + )) + ) + 
collectors["connection_ping_min"] = ( + lambda: round(1000 * min( + [connection.last_ping_delay for connection in file_server.connections if connection.last_ping_delay] + )) + ) + collectors["connection_rev_avg"] = ( + lambda: helper.avg( + [connection.handshake["rev"] for connection in file_server.connections if connection.handshake] + ) + ) + + # Request stats + collectors["file_bytes_recv|change"] = lambda: file_server.bytes_recv + collectors["file_bytes_sent|change"] = lambda: file_server.bytes_sent + collectors["request_num_recv|change"] = lambda: file_server.num_recv + collectors["request_num_sent|change"] = lambda: file_server.num_sent + + # Limit + collectors["optional_limit"] = lambda: content_db.getOptionalLimitBytes() + collectors["optional_used"] = lambda: content_db.getOptionalUsedBytes() + collectors["optional_downloaded"] = lambda: sum([site.settings.get("optional_downloaded", 0) for site in sites.values()]) + + # Peers + collectors["peer"] = lambda (peers): len(peers) + collectors["peer_onion"] = lambda (peers): len([True for peer in peers if ".onion" in peer]) + + # Size + collectors["size"] = lambda: sum([site.settings.get("size", 0) for site in sites.values()]) + collectors["size_optional"] = lambda: sum([site.settings.get("size_optional", 0) for site in sites.values()]) + collectors["content"] = lambda: sum([len(site.content_manager.contents) for site in sites.values()]) + + return collectors + + def getSiteCollectors(self): + site_collectors = {} + + # Size + site_collectors["site_size"] = lambda(site): site.settings.get("size", 0) + site_collectors["site_size_optional"] = lambda(site): site.settings.get("size_optional", 0) + site_collectors["site_optional_downloaded"] = lambda(site): site.settings.get("optional_downloaded", 0) + site_collectors["site_content"] = lambda(site): len(site.content_manager.contents) + + # Data transfer + site_collectors["site_bytes_recv|change"] = lambda(site): site.settings.get("bytes_recv", 0) + 
site_collectors["site_bytes_sent|change"] = lambda(site): site.settings.get("bytes_sent", 0) + + # Peers + site_collectors["site_peer"] = lambda(site): len(site.peers) + site_collectors["site_peer_onion"] = lambda(site): len( + [True for peer in site.peers.itervalues() if peer.ip.endswith(".onion")] + ) + site_collectors["site_peer_connected"] = lambda(site): len([True for peer in site.peers.itervalues() if peer.connection]) + + return site_collectors + + def getUniquePeers(self): + sites = sys.modules["main"].file_server.sites + return set(itertools.chain.from_iterable( + [site.peers.keys() for site in sites.values()] + )) + + def collectDatas(self, collectors, last_values, site=None): + if site is None: + peers = self.getUniquePeers() + datas = {} + for key, collector in collectors.iteritems(): + try: + if site: + value = collector(site) + elif key.startswith("peer"): + value = collector(peers) + else: + value = collector() + except Exception as err: + self.log.info("Collector %s error: %s" % (key, err)) + value = None + + if "|change" in key: # Store changes relative to last value + key = key.replace("|change", "") + last_value = last_values.get(key, 0) + last_values[key] = value + value = value - last_value + + if value is None: + datas[key] = None + else: + datas[key] = round(value, 3) + return datas + + def collectGlobal(self, collectors, last_values): + now = int(time.time()) + s = time.time() + datas = self.collectDatas(collectors, last_values["global"]) + values = [] + for key, value in datas.iteritems(): + values.append((self.db.getTypeId(key), value, now)) + self.log.debug("Global collectors done in %.3fs" % (time.time() - s)) + + s = time.time() + cur = self.db.getCursor() + cur.execute("BEGIN") + cur.cursor.executemany("INSERT INTO data (type_id, value, date_added) VALUES (?, ?, ?)", values) + cur.execute("END") + cur.close() + self.log.debug("Global collectors inserted in %.3fs" % (time.time() - s)) + + def collectSites(self, sites, collectors, 
last_values): + now = int(time.time()) + s = time.time() + values = [] + for address, site in sites.iteritems(): + site_datas = self.collectDatas(collectors, last_values["site:%s" % address], site) + for key, value in site_datas.iteritems(): + values.append((self.db.getTypeId(key), self.db.getSiteId(address), value, now)) + time.sleep(0.000001) + self.log.debug("Site collections done in %.3fs" % (time.time() - s)) + + s = time.time() + cur = self.db.getCursor() + cur.execute("BEGIN") + cur.cursor.executemany("INSERT INTO data (type_id, site_id, value, date_added) VALUES (?, ?, ?, ?)", values) + cur.execute("END") + cur.close() + self.log.debug("Site collectors inserted in %.3fs" % (time.time() - s)) + + def collector(self): + collectors = self.getCollectors() + site_collectors = self.getSiteCollectors() + sites = sys.modules["main"].file_server.sites + i = 0 + while 1: + self.collectGlobal(collectors, self.last_values) + if i % 12 == 0: # Only collect sites data every hour + self.collectSites(sites, site_collectors, self.last_values) + time.sleep(60 * 5) + i += 1 diff --git a/plugins/Chart/ChartDb.py b/plugins/Chart/ChartDb.py new file mode 100644 index 000000000..3747dca32 --- /dev/null +++ b/plugins/Chart/ChartDb.py @@ -0,0 +1,133 @@ +from Config import config +from Db import Db +import time + + +class ChartDb(Db): + def __init__(self): + self.version = 2 + super(ChartDb, self).__init__(self.getSchema(), "%s/chart.db" % config.data_dir) + self.foreign_keys = True + self.checkTables() + self.sites = self.loadSites() + self.types = self.loadTypes() + + def getSchema(self): + schema = {} + schema["db_name"] = "Chart" + schema["tables"] = {} + schema["tables"]["data"] = { + "cols": [ + ["data_id", "INTEGER PRIMARY KEY ASC AUTOINCREMENT NOT NULL UNIQUE"], + ["type_id", "INTEGER NOT NULL"], + ["site_id", "INTEGER"], + ["value", "INTEGER"], + ["date_added", "DATETIME DEFAULT (CURRENT_TIMESTAMP)"] + ], + "indexes": [ + "CREATE INDEX site_id ON data (site_id)", + "CREATE 
INDEX date_added ON data (date_added)" + ], + "schema_changed": 2 + } + schema["tables"]["type"] = { + "cols": [ + ["type_id", "INTEGER PRIMARY KEY NOT NULL UNIQUE"], + ["name", "TEXT"] + ], + "schema_changed": 1 + } + schema["tables"]["site"] = { + "cols": [ + ["site_id", "INTEGER PRIMARY KEY NOT NULL UNIQUE"], + ["address", "TEXT"] + ], + "schema_changed": 1 + } + return schema + + def getTypeId(self, name): + if name not in self.types: + self.execute("INSERT INTO type ?", {"name": name}) + self.types[name] = self.cur.cursor.lastrowid + + return self.types[name] + + def getSiteId(self, address): + if address not in self.sites: + self.execute("INSERT INTO site ?", {"address": address}) + self.sites[address] = self.cur.cursor.lastrowid + + return self.sites[address] + + def loadSites(self): + sites = {} + for row in self.execute("SELECT * FROM site"): + sites[row["address"]] = row["site_id"] + return sites + + def loadTypes(self): + types = {} + for row in self.execute("SELECT * FROM type"): + types[row["name"]] = row["type_id"] + return types + + def deleteSite(self, address): + if address in self.sites: + site_id = self.sites[address] + del self.sites[address] + self.execute("DELETE FROM site WHERE ?", {"site_id": site_id}) + self.execute("DELETE FROM data WHERE ?", {"site_id": site_id}) + + def archive(self): + week_back = 1 + while 1: + s = time.time() + date_added_from = time.time() - 60 * 60 * 24 * 7 * (week_back + 1) + date_added_to = date_added_from + 60 * 60 * 24 * 7 + res = self.execute(""" + SELECT + MAX(date_added) AS date_added, + SUM(value) AS value, + GROUP_CONCAT(data_id) AS data_ids, + type_id, + site_id, + COUNT(*) AS num + FROM data + WHERE + site_id IS NULL AND + date_added > :date_added_from AND + date_added < :date_added_to + GROUP BY strftime('%Y-%m-%d %H', date_added, 'unixepoch', 'localtime'), type_id + """, {"date_added_from": date_added_from, "date_added_to": date_added_to}) + + num_archived = 0 + cur = self.getCursor() + for row in res: 
+ if row["num"] == 1: + continue + cur.execute("INSERT INTO data ?", { + "type_id": row["type_id"], + "site_id": row["site_id"], + "value": row["value"], + "date_added": row["date_added"] + }) + cur.execute("DELETE FROM data WHERE data_id IN (%s)" % row["data_ids"]) + num_archived += row["num"] + self.log.debug("Archived %s data from %s weeks ago in %.3fs" % (num_archived, week_back, time.time() - s)) + week_back += 1 + time.sleep(0.1) + if num_archived == 0: + break + # Only keep 6 month of global stats + self.execute( + "DELETE FROM data WHERE site_id IS NULL AND date_added < :date_added_limit", + {"date_added_limit": time.time() - 60 * 60 * 24 * 30 * 6 } + ) + # Only keep 1 month of site stats + self.execute( + "DELETE FROM data WHERE site_id IS NOT NULL AND date_added < :date_added_limit", + {"date_added_limit": time.time() - 60 * 60 * 24 * 30 } + ) + if week_back > 1: + self.execute("VACUUM") diff --git a/plugins/Chart/ChartPlugin.py b/plugins/Chart/ChartPlugin.py new file mode 100644 index 000000000..a491618b2 --- /dev/null +++ b/plugins/Chart/ChartPlugin.py @@ -0,0 +1,60 @@ +import time +import itertools + +import gevent + +from Config import config +from util import helper +from Plugin import PluginManager +from ChartDb import ChartDb +from ChartCollector import ChartCollector + +if "db" not in locals().keys(): # Share on reloads + db = ChartDb() + gevent.spawn_later(10 * 60, db.archive) + helper.timer(60 * 60 * 6, db.archive) + collector = ChartCollector(db) + +@PluginManager.registerTo("SiteManager") +class SiteManagerPlugin(object): + def load(self, *args, **kwargs): + back = super(SiteManagerPlugin, self).load(*args, **kwargs) + collector.setInitialLastValues(self.sites.values()) + return back + + def delete(self, address, *args, **kwargs): + db.deleteSite(address) + return super(SiteManagerPlugin, self).delete(address, *args, **kwargs) + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + def actionChartDbQuery(self, to, 
query, params=None): + if not "ADMIN" in self.permissions: + return {"error": "No permission"} + + if config.debug or config.verbose: + s = time.time() + rows = [] + try: + if not query.strip().upper().startswith("SELECT"): + raise Exception("Only SELECT query supported") + res = db.execute(query, params) + except Exception, err: # Response the error to client + self.log.error("ChartDbQuery error: %s" % err) + return {"error": str(err)} + # Convert result to dict + for row in res: + rows.append(dict(row)) + if config.verbose and time.time() - s > 0.1: # Log slow query + self.log.debug("Slow query: %s (%.3fs)" % (query, time.time() - s)) + return rows + + def actionChartGetPeerLocations(self, to): + if not "ADMIN" in self.permissions: + return {"error": "No permission"} + + peers = {} + for site in self.server.sites.values(): + peers.update(site.peers) + peer_locations = self.getPeerLocations(peers) + return peer_locations diff --git a/plugins/Chart/__init__.py b/plugins/Chart/__init__.py new file mode 100644 index 000000000..78981122d --- /dev/null +++ b/plugins/Chart/__init__.py @@ -0,0 +1 @@ +import ChartPlugin \ No newline at end of file diff --git a/plugins/ContentFilter/ContentFilterPlugin.py b/plugins/ContentFilter/ContentFilterPlugin.py new file mode 100644 index 000000000..05f333765 --- /dev/null +++ b/plugins/ContentFilter/ContentFilterPlugin.py @@ -0,0 +1,223 @@ +import time +import re +import cgi +import hashlib + +from Plugin import PluginManager +from Translate import Translate +from Config import config + +from ContentFilterStorage import ContentFilterStorage + + +if "_" not in locals(): + _ = Translate("plugins/ContentFilter/languages/") + + +@PluginManager.registerTo("SiteManager") +class SiteManagerPlugin(object): + def load(self, *args, **kwargs): + global filter_storage + super(SiteManagerPlugin, self).load(*args, **kwargs) + filter_storage = ContentFilterStorage(site_manager=self) + + +@PluginManager.registerTo("UiWebsocket") +class 
UiWebsocketPlugin(object): + # Mute + def cbMuteAdd(self, to, auth_address, cert_user_id, reason): + filter_storage.file_content["mutes"][auth_address] = { + "cert_user_id": cert_user_id, "reason": reason, "source": self.site.address, "date_added": time.time() + } + filter_storage.save() + filter_storage.changeDbs(auth_address, "remove") + self.response(to, "ok") + + def actionMuteAdd(self, to, auth_address, cert_user_id, reason): + if "ADMIN" in self.getPermissions(to): + self.cbMuteAdd(to, auth_address, cert_user_id, reason) + else: + self.cmd( + "confirm", + [_["Hide all content from %s?"] % cgi.escape(cert_user_id), _["Mute"]], + lambda (res): self.cbMuteAdd(to, auth_address, cert_user_id, reason) + ) + + def cbMuteRemove(self, to, auth_address): + del filter_storage.file_content["mutes"][auth_address] + filter_storage.save() + filter_storage.changeDbs(auth_address, "load") + self.response(to, "ok") + + def actionMuteRemove(self, to, auth_address): + if "ADMIN" in self.getPermissions(to): + self.cbMuteRemove(to, auth_address) + else: + self.cmd( + "confirm", + [_["Unmute %s?"] % cgi.escape(filter_storage.file_content["mutes"][auth_address]["cert_user_id"]), _["Unmute"]], + lambda (res): self.cbMuteRemove(to, auth_address) + ) + + def actionMuteList(self, to): + if "ADMIN" in self.getPermissions(to): + self.response(to, filter_storage.file_content["mutes"]) + else: + return self.response(to, {"error": "Forbidden: Only ADMIN sites can list mutes"}) + + # Siteblock + def actionSiteblockAdd(self, to, site_address, reason=None): + if "ADMIN" not in self.getPermissions(to): + return self.response(to, {"error": "Forbidden: Only ADMIN sites can add to blocklist"}) + filter_storage.file_content["siteblocks"][site_address] = {"date_added": time.time(), "reason": reason} + filter_storage.save() + self.response(to, "ok") + + def actionSiteblockRemove(self, to, site_address): + if "ADMIN" not in self.getPermissions(to): + return self.response(to, {"error": "Forbidden: Only 
ADMIN sites can remove from blocklist"}) + del filter_storage.file_content["siteblocks"][site_address] + filter_storage.save() + self.response(to, "ok") + + def actionSiteblockList(self, to): + if "ADMIN" in self.getPermissions(to): + self.response(to, filter_storage.file_content["siteblocks"]) + else: + return self.response(to, {"error": "Forbidden: Only ADMIN sites can list blocklists"}) + + # Include + def actionFilterIncludeAdd(self, to, inner_path, description=None, address=None): + if address: + if "ADMIN" not in self.getPermissions(to): + return self.response(to, {"error": "Forbidden: Only ADMIN sites can manage different site include"}) + site = self.server.sites[address] + else: + address = self.site.address + site = self.site + + if "ADMIN" in self.getPermissions(to): + self.cbFilterIncludeAdd(to, True, address, inner_path, description) + else: + content = site.storage.loadJson(inner_path) + title = _["New shared global content filter: %s (%s sites, %s users)"] % ( + cgi.escape(inner_path), len(content.get("siteblocks", {})), len(content.get("mutes", {})) + ) + + self.cmd( + "confirm", + [title, "Add"], + lambda (res): self.cbFilterIncludeAdd(to, res, address, inner_path, description) + ) + + def cbFilterIncludeAdd(self, to, res, address, inner_path, description): + if not res: + self.response(to, res) + return False + + filter_storage.includeAdd(address, inner_path, description) + self.response(to, "ok") + + def actionFilterIncludeRemove(self, to, inner_path, address=None): + if address: + if "ADMIN" not in self.getPermissions(to): + return self.response(to, {"error": "Forbidden: Only ADMIN sites can manage different site include"}) + else: + address = self.site.address + + key = "%s/%s" % (address, inner_path) + if key not in filter_storage.file_content["includes"]: + self.response(to, {"error": "Include not found"}) + filter_storage.includeRemove(address, inner_path) + self.response(to, "ok") + + def actionFilterIncludeList(self, to, all_sites=False, 
filters=False): + if all_sites and "ADMIN" not in self.getPermissions(to): + return self.response(to, {"error": "Forbidden: Only ADMIN sites can list all sites includes"}) + + back = [] + includes = filter_storage.file_content.get("includes", {}).values() + for include in includes: + if not all_sites and include["address"] != self.site.address: + continue + if filters: + include = dict(include) # Don't modify original file_content + include_site = filter_storage.site_manager.get(include["address"]) + if not include_site: + continue + try: + content = include_site.storage.loadJson(include["inner_path"]) + include["error"] = None + except Exception as err: + if include_site.settings["own"]: + include_site.log.warning("Error loading filter %s: %s" % (include["inner_path"], err)) + content = {} + include["error"] = str(err) + include["mutes"] = content.get("mutes", {}) + include["siteblocks"] = content.get("siteblocks", {}) + back.append(include) + self.response(to, back) + + +@PluginManager.registerTo("SiteStorage") +class SiteStoragePlugin(object): + def updateDbFile(self, inner_path, file=None, cur=None): + if file is not False: # File deletion always allowed + # Find for bitcoin addresses in file path + matches = re.findall("/(1[A-Za-z0-9]{26,35})/", inner_path) + # Check if any of the adresses are in the mute list + for auth_address in matches: + if filter_storage.isMuted(auth_address): + self.log.debug("Mute match: %s, ignoring %s" % (auth_address, inner_path)) + return False + + return super(SiteStoragePlugin, self).updateDbFile(inner_path, file=file, cur=cur) + + def onUpdated(self, inner_path, file=None): + file_path = "%s/%s" % (self.site.address, inner_path) + if file_path in filter_storage.file_content["includes"]: + self.log.debug("Filter file updated: %s" % inner_path) + filter_storage.includeUpdateAll() + return super(SiteStoragePlugin, self).onUpdated(inner_path, file=file) + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + 
def actionWrapper(self, path, extra_headers=None): + match = re.match("/(?P
[A-Za-z0-9\._-]+)(?P/.*|$)", path) + if not match: + return False + address = match.group("address") + + if self.server.site_manager.get(address): # Site already exists + return super(UiRequestPlugin, self).actionWrapper(path, extra_headers) + + if self.server.site_manager.isDomain(address): + address = self.server.site_manager.resolveDomain(address) + + if address: + address_sha256 = "0x" + hashlib.sha256(address).hexdigest() + else: + address_sha256 = None + + if filter_storage.isSiteblocked(address) or filter_storage.isSiteblocked(address_sha256): + site = self.server.site_manager.get(config.homepage) + if not extra_headers: + extra_headers = {} + + script_nonce = self.getScriptNonce() + + self.sendHeader(extra_headers=extra_headers, script_nonce=script_nonce) + return iter([super(UiRequestPlugin, self).renderWrapper( + site, path, "uimedia/plugins/contentfilter/blocklisted.html?address=" + address, + "Blacklisted site", extra_headers, show_loadingscreen=False, script_nonce=script_nonce + )]) + else: + return super(UiRequestPlugin, self).actionWrapper(path, extra_headers) + + def actionUiMedia(self, path, *args, **kwargs): + if path.startswith("/uimedia/plugins/contentfilter/"): + file_path = path.replace("/uimedia/plugins/contentfilter/", "plugins/ContentFilter/media/") + return self.actionFile(file_path) + else: + return super(UiRequestPlugin, self).actionUiMedia(path) diff --git a/plugins/ContentFilter/ContentFilterStorage.py b/plugins/ContentFilter/ContentFilterStorage.py new file mode 100644 index 000000000..17af298f7 --- /dev/null +++ b/plugins/ContentFilter/ContentFilterStorage.py @@ -0,0 +1,140 @@ +import os +import json +import logging +import collections +import time + +from Debug import Debug +from Plugin import PluginManager +from Config import config +from util import helper + +class ContentFilterStorage(object): + def __init__(self, site_manager): + self.log = logging.getLogger("ContentFilterStorage") + self.file_path = "%s/filters.json" % 
config.data_dir + self.site_manager = site_manager + self.file_content = self.load() + + # Set default values for filters.json + if not self.file_content: + self.file_content = {} + + # Site blacklist renamed to site blocks + if "site_blacklist" in self.file_content: + self.file_content["siteblocks"] = self.file_content["site_blacklist"] + del self.file_content["site_blacklist"] + + for key in ["mutes", "siteblocks", "includes"]: + if key not in self.file_content: + self.file_content[key] = {} + + self.include_filters = collections.defaultdict(set) # Merged list of mutes and blacklists from all include + self.includeUpdateAll(update_site_dbs=False) + + def load(self): + # Rename previously used mutes.json -> filters.json + if os.path.isfile("%s/mutes.json" % config.data_dir): + self.log.info("Renaming mutes.json to filters.json...") + os.rename("%s/mutes.json" % config.data_dir, self.file_path) + if os.path.isfile(self.file_path): + try: + return json.load(open(self.file_path)) + except Exception as err: + self.log.error("Error loading filters.json: %s" % err) + return None + else: + return None + + def includeUpdateAll(self, update_site_dbs=True): + s = time.time() + new_include_filters = collections.defaultdict(set) + + # Load all include files data into a merged set + for include_path in self.file_content["includes"]: + address, inner_path = include_path.split("/", 1) + try: + content = self.site_manager.get(address).storage.loadJson(inner_path) + except Exception as err: + self.log.warning( + "Error loading include %s: %s" % + (include_path, Debug.formatException(err)) + ) + continue + + for key, val in content.iteritems(): + if type(val) is not dict: + continue + + new_include_filters[key].update(val.keys()) + + mutes_added = new_include_filters["mutes"].difference(self.include_filters["mutes"]) + mutes_removed = self.include_filters["mutes"].difference(new_include_filters["mutes"]) + + self.include_filters = new_include_filters + + if update_site_dbs: + for 
auth_address in mutes_added: + self.changeDbs(auth_address, "remove") + + for auth_address in mutes_removed: + if not self.isMuted(auth_address): + self.changeDbs(auth_address, "load") + + num_mutes = len(self.include_filters["mutes"]) + num_siteblocks = len(self.include_filters["siteblocks"]) + self.log.debug( + "Loaded %s mutes, %s blocked sites from %s includes in %.3fs" % + (num_mutes, num_siteblocks, len(self.file_content["includes"]), time.time() - s) + ) + + def includeAdd(self, address, inner_path, description=None): + self.file_content["includes"]["%s/%s" % (address, inner_path)] = { + "date_added": time.time(), + "address": address, + "description": description, + "inner_path": inner_path + } + self.includeUpdateAll() + self.save() + + def includeRemove(self, address, inner_path): + del self.file_content["includes"]["%s/%s" % (address, inner_path)] + self.includeUpdateAll() + self.save() + + def save(self): + s = time.time() + helper.atomicWrite(self.file_path, json.dumps(self.file_content, indent=2, sort_keys=True)) + self.log.debug("Saved in %.3fs" % (time.time() - s)) + + def isMuted(self, auth_address): + if auth_address in self.file_content["mutes"] or auth_address in self.include_filters["mutes"]: + return True + else: + return False + + def isSiteblocked(self, address): + if address in self.file_content["siteblocks"] or address in self.include_filters["siteblocks"]: + return True + else: + return False + + # Search and remove or readd files of an user + def changeDbs(self, auth_address, action): + self.log.debug("Mute action %s on user %s" % (action, auth_address)) + res = self.site_manager.list().values()[0].content_manager.contents.db.execute( + "SELECT * FROM content LEFT JOIN site USING (site_id) WHERE inner_path LIKE :inner_path", + {"inner_path": "%%/%s/%%" % auth_address} + ) + for row in res: + site = self.site_manager.sites.get(row["address"]) + if not site: + continue + dir_inner_path = helper.getDirname(row["inner_path"]) + for file_name 
in site.storage.walk(dir_inner_path): + if action == "remove": + site.storage.onUpdated(dir_inner_path + file_name, False) + else: + site.storage.onUpdated(dir_inner_path + file_name) + site.onFileDone(dir_inner_path + file_name) diff --git a/plugins/ContentFilter/Test/TestContentFilter.py b/plugins/ContentFilter/Test/TestContentFilter.py new file mode 100644 index 000000000..e1b37b163 --- /dev/null +++ b/plugins/ContentFilter/Test/TestContentFilter.py @@ -0,0 +1,82 @@ +import pytest +from ContentFilter import ContentFilterPlugin +from Site import SiteManager + + +@pytest.fixture +def filter_storage(): + ContentFilterPlugin.filter_storage = ContentFilterPlugin.ContentFilterStorage(SiteManager.site_manager) + return ContentFilterPlugin.filter_storage + + +@pytest.mark.usefixtures("resetSettings") +@pytest.mark.usefixtures("resetTempSettings") +class TestContentFilter: + def createInclude(self, site): + site.storage.writeJson("filters.json", { + "mutes": {"1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C": {}}, + "siteblocks": {site.address: {}} + }) + + def testIncludeLoad(self, site, filter_storage): + self.createInclude(site) + filter_storage.file_content["includes"]["%s/%s" % (site.address, "filters.json")] = { + "date_added": 1528295893, + } + + assert not filter_storage.include_filters["mutes"] + assert not filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") + assert not filter_storage.isSiteblocked(site.address) + filter_storage.includeUpdateAll(update_site_dbs=False) + assert len(filter_storage.include_filters["mutes"]) == 1 + assert filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") + assert filter_storage.isSiteblocked(site.address) + + def testIncludeAdd(self, site, filter_storage): + self.createInclude(site) + query_num_json = "SELECT COUNT(*) AS num FROM json WHERE directory = 'users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C'" + assert not filter_storage.isSiteblocked(site.address) + assert not filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") + 
assert site.storage.query(query_num_json).fetchone()["num"] == 2 + + # Add include + filter_storage.includeAdd(site.address, "filters.json") + + assert filter_storage.isSiteblocked(site.address) + assert filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") + assert site.storage.query(query_num_json).fetchone()["num"] == 0 + + # Remove include + filter_storage.includeRemove(site.address, "filters.json") + + assert not filter_storage.isSiteblocked(site.address) + assert not filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") + assert site.storage.query(query_num_json).fetchone()["num"] == 2 + + def testIncludeChange(self, site, filter_storage): + self.createInclude(site) + filter_storage.includeAdd(site.address, "filters.json") + assert filter_storage.isSiteblocked(site.address) + assert filter_storage.isMuted("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C") + + # Add new blocked site + assert not filter_storage.isSiteblocked("1Hello") + + filter_content = site.storage.loadJson("filters.json") + filter_content["siteblocks"]["1Hello"] = {} + site.storage.writeJson("filters.json", filter_content) + + assert filter_storage.isSiteblocked("1Hello") + + # Add new muted user + query_num_json = "SELECT COUNT(*) AS num FROM json WHERE directory = 'users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q'" + assert not filter_storage.isMuted("1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q") + assert site.storage.query(query_num_json).fetchone()["num"] == 2 + + filter_content["mutes"]["1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q"] = {} + site.storage.writeJson("filters.json", filter_content) + + assert filter_storage.isMuted("1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q") + assert site.storage.query(query_num_json).fetchone()["num"] == 0 + + diff --git a/plugins/ContentFilter/Test/conftest.py b/plugins/ContentFilter/Test/conftest.py new file mode 100644 index 000000000..634e66e2e --- /dev/null +++ b/plugins/ContentFilter/Test/conftest.py @@ -0,0 +1 @@ +from src.Test.conftest import * diff --git 
a/plugins/ContentFilter/Test/pytest.ini b/plugins/ContentFilter/Test/pytest.ini new file mode 100644 index 000000000..d09210d1d --- /dev/null +++ b/plugins/ContentFilter/Test/pytest.ini @@ -0,0 +1,5 @@ +[pytest] +python_files = Test*.py +addopts = -rsxX -v --durations=6 +markers = + webtest: mark a test as a webtest. \ No newline at end of file diff --git a/plugins/ContentFilter/__init__.py b/plugins/ContentFilter/__init__.py new file mode 100644 index 000000000..4d8c3accd --- /dev/null +++ b/plugins/ContentFilter/__init__.py @@ -0,0 +1 @@ +import ContentFilterPlugin diff --git a/plugins/ContentFilter/languages/hu.json b/plugins/ContentFilter/languages/hu.json new file mode 100644 index 000000000..9b57e6979 --- /dev/null +++ b/plugins/ContentFilter/languages/hu.json @@ -0,0 +1,6 @@ +{ + "Hide all content from %s?": "%s tartalmaniak elrejtése?", + "Mute": "Elnémítás", + "Unmute %s?": "%s tartalmaniak megjelenítése?", + "Unmute": "Némítás visszavonása" +} diff --git a/plugins/ContentFilter/languages/it.json b/plugins/ContentFilter/languages/it.json new file mode 100644 index 000000000..9a2c6761d --- /dev/null +++ b/plugins/ContentFilter/languages/it.json @@ -0,0 +1,6 @@ +{ + "Hide all content from %s?": "%s Vuoi nascondere i contenuti di questo utente ?", + "Mute": "Attiva Silenzia", + "Unmute %s?": "%s Vuoi mostrare i contenuti di questo utente ?", + "Unmute": "Disattiva Silenzia" +} diff --git a/plugins/ContentFilter/languages/pt-br.json b/plugins/ContentFilter/languages/pt-br.json new file mode 100644 index 000000000..3c6bfbdcf --- /dev/null +++ b/plugins/ContentFilter/languages/pt-br.json @@ -0,0 +1,6 @@ +{ + "Hide all content from %s?": "%s Ocultar todo o conteúdo de ?", + "Mute": "Ativar o Silêncio", + "Unmute %s?": "%s Você quer mostrar o conteúdo deste usuário ?", + "Unmute": "Desligar o silêncio" +} diff --git a/plugins/ContentFilter/languages/zh-tw.json b/plugins/ContentFilter/languages/zh-tw.json new file mode 100644 index 000000000..0995f3a0f --- 
/dev/null +++ b/plugins/ContentFilter/languages/zh-tw.json @@ -0,0 +1,6 @@ +{ + "Hide all content from %s?": "屏蔽 %s 的所有內容?", + "Mute": "屏蔽", + "Unmute %s?": "對 %s 解除屏蔽?", + "Unmute": "解除屏蔽" +} diff --git a/plugins/ContentFilter/languages/zh.json b/plugins/ContentFilter/languages/zh.json new file mode 100644 index 000000000..bf63f1075 --- /dev/null +++ b/plugins/ContentFilter/languages/zh.json @@ -0,0 +1,6 @@ +{ + "Hide all content from %s?": "屏蔽 %s 的所有内容?", + "Mute": "屏蔽", + "Unmute %s?": "对 %s 解除屏蔽?", + "Unmute": "解除屏蔽" +} diff --git a/plugins/ContentFilter/media/blocklisted.html b/plugins/ContentFilter/media/blocklisted.html new file mode 100644 index 000000000..9a287b722 --- /dev/null +++ b/plugins/ContentFilter/media/blocklisted.html @@ -0,0 +1,107 @@ + + + + + +
+

Site blocked

+

This site is on your blocklist:

+
+
Too much image
+
on 2015-01-25 12:32:11
+
+ +
+ + + + + + diff --git a/plugins/ContentFilter/media/js/ZeroFrame.js b/plugins/ContentFilter/media/js/ZeroFrame.js new file mode 100644 index 000000000..d6facdbf8 --- /dev/null +++ b/plugins/ContentFilter/media/js/ZeroFrame.js @@ -0,0 +1,119 @@ +// Version 1.0.0 - Initial release +// Version 1.1.0 (2017-08-02) - Added cmdp function that returns promise instead of using callback +// Version 1.2.0 (2017-08-02) - Added Ajax monkey patch to emulate XMLHttpRequest over ZeroFrame API + +const CMD_INNER_READY = 'innerReady' +const CMD_RESPONSE = 'response' +const CMD_WRAPPER_READY = 'wrapperReady' +const CMD_PING = 'ping' +const CMD_PONG = 'pong' +const CMD_WRAPPER_OPENED_WEBSOCKET = 'wrapperOpenedWebsocket' +const CMD_WRAPPER_CLOSE_WEBSOCKET = 'wrapperClosedWebsocket' + +class ZeroFrame { + constructor(url) { + this.url = url + this.waiting_cb = {} + this.wrapper_nonce = document.location.href.replace(/.*wrapper_nonce=([A-Za-z0-9]+).*/, "$1") + this.connect() + this.next_message_id = 1 + this.init() + } + + init() { + return this + } + + connect() { + this.target = window.parent + window.addEventListener('message', e => this.onMessage(e), false) + this.cmd(CMD_INNER_READY) + } + + onMessage(e) { + let message = e.data + let cmd = message.cmd + if (cmd === CMD_RESPONSE) { + if (this.waiting_cb[message.to] !== undefined) { + this.waiting_cb[message.to](message.result) + } + else { + this.log("Websocket callback not found:", message) + } + } else if (cmd === CMD_WRAPPER_READY) { + this.cmd(CMD_INNER_READY) + } else if (cmd === CMD_PING) { + this.response(message.id, CMD_PONG) + } else if (cmd === CMD_WRAPPER_OPENED_WEBSOCKET) { + this.onOpenWebsocket() + } else if (cmd === CMD_WRAPPER_CLOSE_WEBSOCKET) { + this.onCloseWebsocket() + } else { + this.onRequest(cmd, message) + } + } + + onRequest(cmd, message) { + this.log("Unknown request", message) + } + + response(to, result) { + this.send({ + cmd: CMD_RESPONSE, + to: to, + result: result + }) + } + + cmd(cmd, params={}, 
cb=null) { + this.send({ + cmd: cmd, + params: params + }, cb) + } + + cmdp(cmd, params={}) { + return new Promise((resolve, reject) => { + this.cmd(cmd, params, (res) => { + if (res && res.error) { + reject(res.error) + } else { + resolve(res) + } + }) + }) + } + + send(message, cb=null) { + message.wrapper_nonce = this.wrapper_nonce + message.id = this.next_message_id + this.next_message_id++ + this.target.postMessage(message, '*') + if (cb) { + this.waiting_cb[message.id] = cb + } + } + + log(...args) { + console.log.apply(console, ['[ZeroFrame]'].concat(args)) + } + + onOpenWebsocket() { + this.log('Websocket open') + } + + onCloseWebsocket() { + this.log('Websocket close') + } + + monkeyPatchAjax() { + var page = this + XMLHttpRequest.prototype.realOpen = XMLHttpRequest.prototype.open + this.cmd("wrapperGetAjaxKey", [], (res) => { this.ajax_key = res }) + var newOpen = function (method, url, async) { + url += "?ajax_key=" + page.ajax_key + return this.realOpen(method, url, async) + } + XMLHttpRequest.prototype.open = newOpen + } +} diff --git a/plugins/Cors/CorsPlugin.py b/plugins/Cors/CorsPlugin.py new file mode 100644 index 000000000..8d7589883 --- /dev/null +++ b/plugins/Cors/CorsPlugin.py @@ -0,0 +1,104 @@ +import re +import cgi +import copy + +from Plugin import PluginManager +from Translate import Translate +if "_" not in locals(): + _ = Translate("plugins/Cors/languages/") + + +def getCorsPath(site, inner_path): + match = re.match("^cors-([A-Za-z0-9]{26,35})/(.*)", inner_path) + if not match: + raise Exception("Invalid cors path: %s" % inner_path) + cors_address = match.group(1) + cors_inner_path = match.group(2) + + if not "Cors:%s" % cors_address in site.settings["permissions"]: + raise Exception("This site has no permission to access site %s" % cors_address) + + return cors_address, cors_inner_path + + +@PluginManager.registerTo("UiWebsocket") +class UiWebsocketPlugin(object): + def hasSitePermission(self, address, cmd=None): + if 
super(UiWebsocketPlugin, self).hasSitePermission(address, cmd=cmd): + return True + + if not "Cors:%s" % address in self.site.settings["permissions"] or cmd not in ["fileGet", "fileList", "dirList", "fileRules", "optionalFileInfo", "fileQuery", "dbQuery", "userGetSettings", "siteInfo"]: + return False + else: + return True + + # Add cors support for file commands + def corsFuncWrapper(self, func_name, to, inner_path, *args, **kwargs): + if inner_path.startswith("cors-"): + cors_address, cors_inner_path = getCorsPath(self.site, inner_path) + + req_self = copy.copy(self) + req_self.site = self.server.sites.get(cors_address) # Change the site to the merged one + if not req_self.site: + return {"error": "No site found"} + + func = getattr(super(UiWebsocketPlugin, req_self), func_name) + back = func(to, cors_inner_path, *args, **kwargs) + return back + else: + func = getattr(super(UiWebsocketPlugin, self), func_name) + return func(to, inner_path, *args, **kwargs) + + def actionFileGet(self, to, inner_path, *args, **kwargs): + return self.corsFuncWrapper("actionFileGet", to, inner_path, *args, **kwargs) + + def actionFileList(self, to, inner_path, *args, **kwargs): + return self.corsFuncWrapper("actionFileList", to, inner_path, *args, **kwargs) + + def actionDirList(self, to, inner_path, *args, **kwargs): + return self.corsFuncWrapper("actionDirList", to, inner_path, *args, **kwargs) + + def actionFileRules(self, to, inner_path, *args, **kwargs): + return self.corsFuncWrapper("actionFileRules", to, inner_path, *args, **kwargs) + + def actionOptionalFileInfo(self, to, inner_path, *args, **kwargs): + return self.corsFuncWrapper("actionOptionalFileInfo", to, inner_path, *args, **kwargs) + + def actionCorsPermission(self, to, address): + site = self.server.sites.get(address) + if site: + site_name = site.content_manager.contents.get("content.json", {}).get("title") + button_title = _["Grant"] + else: + site_name = address + button_title = _["Grant & Add"] + + if site and 
"Cors:" + address in self.permissions: + return "ignored" + + self.cmd( + "confirm", + [_["This site requests read permission to: %s"] % cgi.escape(site_name), button_title], + lambda (res): self.cbCorsPermission(to, address) + ) + + def cbCorsPermission(self, to, address): + self.actionPermissionAdd(to, "Cors:" + address) + site = self.server.sites.get(address) + if not site: + self.server.site_manager.need(address) + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + # Allow to load cross origin files using /cors-address/file.jpg + def parsePath(self, path): + path_parts = super(UiRequestPlugin, self).parsePath(path) + if "cors-" not in path: # Optimization + return path_parts + site = self.server.sites[path_parts["address"]] + try: + path_parts["address"], path_parts["inner_path"] = getCorsPath(site, path_parts["inner_path"]) + except: + return None + return path_parts diff --git a/plugins/Cors/__init__.py b/plugins/Cors/__init__.py new file mode 100644 index 000000000..bca1ab3e3 --- /dev/null +++ b/plugins/Cors/__init__.py @@ -0,0 +1 @@ +import CorsPlugin \ No newline at end of file diff --git a/plugins/CryptMessage/CryptMessagePlugin.py b/plugins/CryptMessage/CryptMessagePlugin.py index 0302c83ac..71499ecad 100644 --- a/plugins/CryptMessage/CryptMessagePlugin.py +++ b/plugins/CryptMessage/CryptMessagePlugin.py @@ -53,7 +53,7 @@ def actionEciesDecrypt(self, to, param, privatekey=0): try: text = self.decrypt(encrypted_text.decode("base64"), privatekey) texts.append(text) - except Exception, err: + except Exception as err: texts.append(None) if type(param) == list: diff --git a/plugins/CryptMessage/Test/TestCrypt.py b/plugins/CryptMessage/Test/TestCrypt.py index 8e16cba23..f3098a769 100644 --- a/plugins/CryptMessage/Test/TestCrypt.py +++ b/plugins/CryptMessage/Test/TestCrypt.py @@ -14,7 +14,10 @@ def testPublickey(self, ui_websocket): assert ui_websocket.testAction("UserPublickey", 2) == ui_websocket.testAction("UserPublickey", 2) # 
Different publickey for different cert + site_data = ui_websocket.user.getSiteData(ui_websocket.site.address) + site_data["cert"] = None pub1 = ui_websocket.testAction("UserPublickey", 0) + site_data = ui_websocket.user.getSiteData(ui_websocket.site.address) site_data["cert"] = "zeroid.bit" pub2 = ui_websocket.testAction("UserPublickey", 0) diff --git a/plugins/FilePack/FilePackPlugin.py b/plugins/FilePack/FilePackPlugin.py new file mode 100644 index 000000000..8d662bba2 --- /dev/null +++ b/plugins/FilePack/FilePackPlugin.py @@ -0,0 +1,194 @@ +import os +import re + +import gevent + +from Plugin import PluginManager +from Config import config +from Debug import Debug + + +# Keep archive open for faster reponse times for large sites +archive_cache = {} + + +def closeArchive(archive_path): + if archive_path in archive_cache: + del archive_cache[archive_path] + + +def openArchive(archive_path, file_obj=None): + if archive_path not in archive_cache: + if archive_path.endswith("tar.gz"): + import tarfile + archive_cache[archive_path] = tarfile.open(file_obj or archive_path, "r:gz") + elif archive_path.endswith("tar.bz2"): + import tarfile + archive_cache[archive_path] = tarfile.open(file_obj or archive_path, "r:bz2") + else: + import zipfile + archive_cache[archive_path] = zipfile.ZipFile(file_obj or archive_path) + gevent.spawn_later(5, lambda: closeArchive(archive_path)) # Close after 5 sec + + archive = archive_cache[archive_path] + return archive + + +def openArchiveFile(archive_path, path_within, file_obj=None): + archive = openArchive(archive_path, file_obj=file_obj) + if archive_path.endswith(".zip"): + return archive.open(path_within) + else: + return archive.extractfile(path_within.encode("utf8")) + + +@PluginManager.registerTo("UiRequest") +class UiRequestPlugin(object): + def actionSiteMedia(self, path, **kwargs): + if ".zip/" in path or ".tar.gz/" in path: + file_obj = None + path_parts = self.parsePath(path) + file_path = u"%s/%s/%s" % (config.data_dir, 
path_parts["address"], path_parts["inner_path"].decode("utf8")) + match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))/(.*)", file_path) + archive_path, path_within = match.groups() + if archive_path not in archive_cache: + site = self.server.site_manager.get(path_parts["address"]) + if not site: + return self.actionSiteAddPrompt(path) + archive_inner_path = site.storage.getInnerPath(archive_path) + if not os.path.isfile(archive_path): + # Wait until file downloads + result = site.needFile(archive_inner_path, priority=10) + # Send virutal file path download finished event to remove loading screen + site.updateWebsocket(file_done=archive_inner_path) + if not result: + return self.error404(archive_inner_path) + file_obj = site.storage.openBigfile(archive_inner_path) + + header_allow_ajax = False + if self.get.get("ajax_key"): + requester_site = self.server.site_manager.get(path_parts["request_address"]) + if self.get["ajax_key"] == requester_site.settings["ajax_key"]: + header_allow_ajax = True + else: + return self.error403("Invalid ajax_key") + + try: + file = openArchiveFile(archive_path, path_within, file_obj=file_obj) + content_type = self.getContentType(file_path) + self.sendHeader(200, content_type=content_type, noscript=kwargs.get("header_noscript", False), allow_ajax=header_allow_ajax) + return self.streamFile(file) + except Exception as err: + self.log.debug("Error opening archive file: %s" % Debug.formatException(err)) + return self.error404(path) + + return super(UiRequestPlugin, self).actionSiteMedia(path, **kwargs) + + def streamFile(self, file): + for i in range(100): # Read max 6MB + try: + block = file.read(60 * 1024) + if block: + yield block + else: + raise StopIteration + except StopIteration: + file.close() + break + + +@PluginManager.registerTo("SiteStorage") +class SiteStoragePlugin(object): + def isFile(self, inner_path): + if ".zip/" in inner_path or ".tar.gz/" in inner_path: + match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))/(.*)", inner_path) + 
archive_inner_path, path_within = match.groups() + return super(SiteStoragePlugin, self).isFile(archive_inner_path) + else: + return super(SiteStoragePlugin, self).isFile(inner_path) + + def openArchive(self, inner_path): + archive_path = self.getPath(inner_path) + file_obj = None + if archive_path not in archive_cache: + if not os.path.isfile(archive_path): + result = self.site.needFile(inner_path, priority=10) + self.site.updateWebsocket(file_done=inner_path) + if not result: + raise Exception("Unable to download file") + file_obj = self.site.storage.openBigfile(inner_path) + + try: + archive = openArchive(archive_path, file_obj=file_obj) + except Exception as err: + raise Exception("Unable to download file: %s" % err) + + return archive + + def walk(self, inner_path, *args, **kwags): + if ".zip" in inner_path or ".tar.gz" in inner_path: + match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))(.*)", inner_path) + archive_inner_path, path_within = match.groups() + archive = self.openArchive(archive_inner_path) + path_within = path_within.lstrip("/") + + if archive_inner_path.endswith(".zip"): + namelist = [name for name in archive.namelist() if not name.endswith("/")] + else: + namelist = [item.name for item in archive.getmembers() if not item.isdir()] + + namelist_relative = [] + for name in namelist: + if not name.startswith(path_within): + continue + name_relative = name.replace(path_within, "", 1).rstrip("/") + namelist_relative.append(name_relative) + + return namelist_relative + + else: + return super(SiteStoragePlugin, self).walk(inner_path, *args, **kwags) + + def list(self, inner_path, *args, **kwags): + if ".zip" in inner_path or ".tar.gz" in inner_path: + match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))(.*)", inner_path) + archive_inner_path, path_within = match.groups() + archive = self.openArchive(archive_inner_path) + path_within = path_within.lstrip("/") + + if archive_inner_path.endswith(".zip"): + namelist = [name for name in archive.namelist()] + else: + 
namelist = [item.name for item in archive.getmembers()] + + namelist_relative = [] + for name in namelist: + if not name.startswith(path_within): + continue + name_relative = name.replace(path_within, "", 1).rstrip("/") + + if "/" in name_relative: # File is in sub-directory + continue + + namelist_relative.append(name_relative) + return namelist_relative + + else: + return super(SiteStoragePlugin, self).list(inner_path, *args, **kwags) + + def read(self, inner_path, mode="r"): + if ".zip/" in inner_path or ".tar.gz/" in inner_path: + match = re.match("^(.*\.(?:tar.gz|tar.bz2|zip))(.*)", inner_path) + archive_inner_path, path_within = match.groups() + archive = self.openArchive(archive_inner_path) + path_within = path_within.lstrip("/") + print archive, archive_inner_path + + if archive_inner_path.endswith(".zip"): + return archive.open(path_within).read() + else: + return archive.extractfile(path_within.encode("utf8")).read() + + else: + return super(SiteStoragePlugin, self).read(inner_path, mode) + diff --git a/plugins/FilePack/__init__.py b/plugins/FilePack/__init__.py new file mode 100644 index 000000000..ab07a1ff5 --- /dev/null +++ b/plugins/FilePack/__init__.py @@ -0,0 +1 @@ +import FilePackPlugin \ No newline at end of file diff --git a/plugins/MergerSite/MergerSitePlugin.py b/plugins/MergerSite/MergerSitePlugin.py index ba018d86e..3de92a917 100644 --- a/plugins/MergerSite/MergerSitePlugin.py +++ b/plugins/MergerSite/MergerSitePlugin.py @@ -1,9 +1,12 @@ import re import time +import copy from Plugin import PluginManager +from Translate import Translate from util import RateLimit from util import helper +from Debug import Debug try: import OptionalManager.UiWebsocketPlugin # To make optioanlFileInfo merger sites compatible except Exception: @@ -15,6 +18,9 @@ merged_to_merger = {} # {address: [site1, site2, ...]} cache site_manager = None # Site manager for merger sites +if "_" not in locals(): + _ = Translate("plugins/MergerSite/languages/") + # Check if the 
site has permission to this merger site def checkMergerPath(address, inner_path): @@ -29,7 +35,10 @@ def checkMergerPath(address, inner_path): inner_path = re.sub("^merged-(.*?)/([A-Za-z0-9]{26,35})/", "", inner_path) return merged_address, inner_path else: - raise Exception("Merger site (%s) does not have permission for merged site: %s" % (merger_type, merged_address)) + raise Exception( + "Merger site (%s) does not have permission for merged site: %s (%s)" % + (merger_type, merged_address, merged_db.get(merged_address)) + ) else: raise Exception("No merger (%s) permission to load:
%s (%s not in %s)" % ( address, inner_path, merger_type, merger_db.get(address, [])) @@ -56,7 +65,7 @@ def actionMergerSiteAdd(self, to, addresses): else: self.cmd( "confirm", - ["Add %s new site?" % len(addresses), "Add"], + [_["Add %s new site?"] % len(addresses), "Add"], lambda (res): self.cbMergerSiteAdd(to, addresses) ) self.response(to, "ok") @@ -68,7 +77,7 @@ def cbMergerSiteAdd(self, to, addresses): added += 1 site_manager.need(address) if added: - self.cmd("notification", ["done", "Added %s new site" % added, 5000]) + self.cmd("notification", ["done", _["Added %s new site"] % added, 5000]) RateLimit.called(self.site.address + "-MergerSiteAdd") site_manager.updateMergerSites() @@ -84,7 +93,7 @@ def actionMergerSiteDelete(self, to, address): if merged_db.get(address) not in merger_types: return self.response(to, {"error": "Merged type (%s) not in %s" % (merged_db.get(address), merger_types)}) - self.cmd("notification", ["done", "Site deleted: %s" % address, 5000]) + self.cmd("notification", ["done", _["Site deleted: %s"] % address, 5000]) self.response(to, "ok") # Lists merged sites @@ -103,8 +112,8 @@ def actionMergerSiteList(self, to, query_site_info=False): ret[address] = merged_type self.response(to, ret) - def hasSitePermission(self, address): - if super(UiWebsocketPlugin, self).hasSitePermission(address): + def hasSitePermission(self, address, *args, **kwargs): + if super(UiWebsocketPlugin, self).hasSitePermission(address, *args, **kwargs): return True else: if self.site.address in [merger_site.address for merger_site in merged_to_merger.get(address, [])]: @@ -114,25 +123,29 @@ def hasSitePermission(self, address): # Add support merger sites for file commands def mergerFuncWrapper(self, func_name, to, inner_path, *args, **kwargs): - func = getattr(super(UiWebsocketPlugin, self), func_name) if inner_path.startswith("merged-"): merged_address, merged_inner_path = checkMergerPath(self.site.address, inner_path) # Set the same cert for merged site 
merger_cert = self.user.getSiteData(self.site.address).get("cert") - if merger_cert: + if merger_cert and self.user.getSiteData(merged_address).get("cert") != merger_cert: self.user.setCert(merged_address, merger_cert) - site_before = self.site # Save to be able to change it back after we ran the command - self.site = self.server.sites.get(merged_address) # Change the site to the merged one - try: - back = func(to, merged_inner_path, *args, **kwargs) - finally: - self.site = site_before # Change back to original site - return back + req_self = copy.copy(self) + req_self.site = self.server.sites.get(merged_address) # Change the site to the merged one + + func = getattr(super(UiWebsocketPlugin, req_self), func_name) + return func(to, merged_inner_path, *args, **kwargs) else: + func = getattr(super(UiWebsocketPlugin, self), func_name) return func(to, inner_path, *args, **kwargs) + def actionFileList(self, to, inner_path, *args, **kwargs): + return self.mergerFuncWrapper("actionFileList", to, inner_path, *args, **kwargs) + + def actionDirList(self, to, inner_path, *args, **kwargs): + return self.mergerFuncWrapper("actionDirList", to, inner_path, *args, **kwargs) + def actionFileGet(self, to, inner_path, *args, **kwargs): return self.mergerFuncWrapper("actionFileGet", to, inner_path, *args, **kwargs) @@ -145,12 +158,22 @@ def actionFileDelete(self, to, inner_path, *args, **kwargs): def actionFileRules(self, to, inner_path, *args, **kwargs): return self.mergerFuncWrapper("actionFileRules", to, inner_path, *args, **kwargs) + def actionFileNeed(self, to, inner_path, *args, **kwargs): + return self.mergerFuncWrapper("actionFileNeed", to, inner_path, *args, **kwargs) + def actionOptionalFileInfo(self, to, inner_path, *args, **kwargs): return self.mergerFuncWrapper("actionOptionalFileInfo", to, inner_path, *args, **kwargs) def actionOptionalFileDelete(self, to, inner_path, *args, **kwargs): return self.mergerFuncWrapper("actionOptionalFileDelete", to, inner_path, *args, 
**kwargs) + def actionBigfileUploadInit(self, to, inner_path, *args, **kwargs): + back = self.mergerFuncWrapper("actionBigfileUploadInit", to, inner_path, *args, **kwargs) + if inner_path.startswith("merged-"): + merged_address, merged_inner_path = checkMergerPath(self.site.address, inner_path) + back["inner_path"] = "merged-%s/%s/%s" % (merged_db[merged_address], merged_address, back["inner_path"]) + return back + # Add support merger sites for file commands with privatekey parameter def mergerFuncWrapperWithPrivatekey(self, func_name, to, privatekey, inner_path, *args, **kwargs): func = getattr(super(UiWebsocketPlugin, self), func_name) @@ -181,7 +204,30 @@ def actionSitePublish(self, to, privatekey=None, inner_path="content.json", *arg def actionPermissionAdd(self, to, permission): super(UiWebsocketPlugin, self).actionPermissionAdd(to, permission) - self.site.storage.rebuildDb() + if permission.startswith("Merger"): + self.site.storage.rebuildDb() + + def actionPermissionDetails(self, to, permission): + if not permission.startswith("Merger"): + return super(UiWebsocketPlugin, self).actionPermissionDetails(to, permission) + + merger_type = permission.replace("Merger:", "") + if not re.match("^[A-Za-z0-9-]+$", merger_type): + raise Exception("Invalid merger_type: %s" % merger_type) + merged_sites = [] + for address, merged_type in merged_db.iteritems(): + if merged_type != merger_type: + continue + site = self.server.sites.get(address) + try: + merged_sites.append(site.content_manager.contents.get("content.json").get("title", address)) + except Exception as err: + merged_sites.append(address) + + details = _["Read and write permissions to sites with merged type of %s "] % merger_type + details += _["(%s sites)"] % len(merged_sites) + details += "
%s
" % ", ".join(merged_sites) + self.response(to, details) @PluginManager.registerTo("UiRequest") @@ -214,27 +260,32 @@ def getDbFiles(self): for address, merged_type in merged_db.iteritems() if merged_type in merger_types ] + found = 0 for merged_site in merged_sites: + self.log.debug("Loading merged site: %s" % merged_site) merged_type = merged_db[merged_site.address] for content_inner_path, content in merged_site.content_manager.contents.iteritems(): # content.json file itself if merged_site.storage.isFile(content_inner_path): # Missing content.json file - content_path = self.getPath("merged-%s/%s/%s" % (merged_type, merged_site.address, content_inner_path)) - yield content_path, merged_site.storage.open(content_inner_path) + merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, content_inner_path) + yield merged_inner_path, merged_site.storage.getPath(content_inner_path) else: merged_site.log.error("[MISSING] %s" % content_inner_path) # Data files in content.json content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site - for file_relative_path in content["files"].keys(): + for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys(): if not file_relative_path.endswith(".json"): continue # We only interesed in json files file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir file_inner_path = file_inner_path.strip("/") # Strip leading / if merged_site.storage.isFile(file_inner_path): - file_path = self.getPath("merged-%s/%s/%s" % (merged_type, merged_site.address, file_inner_path)) - yield file_path, merged_site.storage.open(file_inner_path) + merged_inner_path = "merged-%s/%s/%s" % (merged_type, merged_site.address, file_inner_path) + yield merged_inner_path, merged_site.storage.getPath(file_inner_path) else: merged_site.log.error("[MISSING] %s" % file_inner_path) + found += 1 + if found % 100 == 0: + time.sleep(0.000001) # 
Context switch to avoid UI block # Also notice merger sites on a merged site file change def onUpdated(self, inner_path, file=None): @@ -266,7 +317,6 @@ def fileDone(self, inner_path): for ws in merger_site.websockets: ws.event("siteChanged", self, {"event": ["file_done", inner_path]}) - def fileFailed(self, inner_path): super(SitePlugin, self).fileFailed(inner_path) @@ -287,9 +337,15 @@ def updateMergerSites(self): merged_db = {} merged_to_merger = {} site_manager = self + if not self.sites: + return for site in self.sites.itervalues(): # Update merged sites - merged_type = site.content_manager.contents.get("content.json", {}).get("merged_type") + try: + merged_type = site.content_manager.contents.get("content.json", {}).get("merged_type") + except Exception, err: + self.log.error("Error loading site %s: %s" % (site.address, Debug.formatException(err))) + continue if merged_type: merged_db[site.address] = merged_type @@ -298,7 +354,10 @@ def updateMergerSites(self): if not permission.startswith("Merger:"): continue if merged_type: - self.log.error("Removing permission %s from %s: Merger and merged at the same time." % (permission, site.address)) + self.log.error( + "Removing permission %s from %s: Merger and merged at the same time." 
% + (permission, site.address) + ) site.settings["permissions"].remove(permission) continue merger_type = permission.replace("Merger:", "") diff --git a/plugins/MergerSite/languages/es.json b/plugins/MergerSite/languages/es.json new file mode 100644 index 000000000..d554c3a93 --- /dev/null +++ b/plugins/MergerSite/languages/es.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "¿Agregar %s nuevo sitio?", + "Added %s new site": "Sitio %s agregado", + "Site deleted: %s": "Sitio removido: %s" +} diff --git a/plugins/MergerSite/languages/fr.json b/plugins/MergerSite/languages/fr.json new file mode 100644 index 000000000..9d59fde95 --- /dev/null +++ b/plugins/MergerSite/languages/fr.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "Ajouter le site %s ?", + "Added %s new site": "Site %s ajouté", + "Site deleted: %s": "Site %s supprimé" +} diff --git a/plugins/MergerSite/languages/hu.json b/plugins/MergerSite/languages/hu.json new file mode 100644 index 000000000..8e377aaa7 --- /dev/null +++ b/plugins/MergerSite/languages/hu.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "Új oldal hozzáadása: %s?", + "Added %s new site": "Új oldal hozzáadva: %s", + "Site deleted: %s": "Oldal törölve: %s" +} diff --git a/plugins/MergerSite/languages/it.json b/plugins/MergerSite/languages/it.json new file mode 100644 index 000000000..d56c98174 --- /dev/null +++ b/plugins/MergerSite/languages/it.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "Aggiungere %s nuovo sito ?", + "Added %s new site": "Sito %s aggiunto", + "Site deleted: %s": "Sito %s eliminato" +} diff --git a/plugins/MergerSite/languages/pt-br.json b/plugins/MergerSite/languages/pt-br.json new file mode 100644 index 000000000..cdc298cb0 --- /dev/null +++ b/plugins/MergerSite/languages/pt-br.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "Adicionar %s novo site?", + "Added %s new site": "Site %s adicionado", + "Site deleted: %s": "Site removido: %s" +} diff --git a/plugins/MergerSite/languages/tr.json b/plugins/MergerSite/languages/tr.json 
new file mode 100644 index 000000000..5afb39427 --- /dev/null +++ b/plugins/MergerSite/languages/tr.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "%s sitesi eklensin mi?", + "Added %s new site": "%s sitesi eklendi", + "Site deleted: %s": "%s sitesi silindi" +} diff --git a/plugins/MergerSite/languages/zh-tw.json b/plugins/MergerSite/languages/zh-tw.json new file mode 100644 index 000000000..a0684e63a --- /dev/null +++ b/plugins/MergerSite/languages/zh-tw.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "添加新網站: %s?", + "Added %s new site": "已添加到新網站:%s", + "Site deleted: %s": "網站已刪除:%s" +} diff --git a/plugins/MergerSite/languages/zh.json b/plugins/MergerSite/languages/zh.json new file mode 100644 index 000000000..127044e60 --- /dev/null +++ b/plugins/MergerSite/languages/zh.json @@ -0,0 +1,5 @@ +{ + "Add %s new site?": "添加新站点: %s?", + "Added %s new site": "已添加到新站点:%s", + "Site deleted: %s": "站点已删除:%s" +} diff --git a/plugins/Newsfeed/NewsfeedPlugin.py b/plugins/Newsfeed/NewsfeedPlugin.py index 3b911101a..4e54fae32 100644 --- a/plugins/Newsfeed/NewsfeedPlugin.py +++ b/plugins/Newsfeed/NewsfeedPlugin.py @@ -3,10 +3,21 @@ from Plugin import PluginManager from Db import DbQuery +from Debug import Debug +from util import helper @PluginManager.registerTo("UiWebsocket") class UiWebsocketPlugin(object): + def formatSiteInfo(self, site, create_user=True): + site_info = super(UiWebsocketPlugin, self).formatSiteInfo(site, create_user=create_user) + feed_following = self.user.sites.get(site.address, {}).get("follow", None) + if feed_following == None: + site_info["feed_follow_num"] = None + else: + site_info["feed_follow_num"] = len(feed_following) + return site_info + def actionFeedFollow(self, to, feeds): self.user.setFeedFollow(self.site.address, feeds) self.user.save() @@ -16,60 +27,104 @@ def actionFeedListFollow(self, to): feeds = self.user.sites[self.site.address].get("follow", {}) self.response(to, feeds) - def actionFeedQuery(self, to): + def actionFeedQuery(self, to, 
limit=10, day_limit=3): if "ADMIN" not in self.site.settings["permissions"]: return self.response(to, "FeedQuery not allowed") from Site import SiteManager rows = [] - for address, site_data in self.user.sites.iteritems(): + stats = [] + + total_s = time.time() + num_sites = 0 + + for address, site_data in self.user.sites.items(): feeds = site_data.get("follow") if not feeds: continue + if type(feeds) is not dict: + self.log.debug("Invalid feed for site %s" % address) + continue + num_sites += 1 for name, query_set in feeds.iteritems(): site = SiteManager.site_manager.get(address) + if not site or not site.storage.has_db: + continue + + s = time.time() try: - query, params = query_set - query_parts = query.split("UNION") + query_raw, params = query_set + query_parts = re.split(r"UNION(?:\s+ALL|)", query_raw) for i, query_part in enumerate(query_parts): db_query = DbQuery(query_part) - where = " WHERE %s > strftime('%%s', 'now', '-3 day')" % db_query.fields.get("date_added", "date_added") - if "WHERE" in query_part: - query_part = re.sub("WHERE (.*?)(?=$| GROUP BY)", where+" AND (\\1)", query_part) - else: - query_part += where + if day_limit: + where = " WHERE %s > strftime('%%s', 'now', '-%s day')" % (db_query.fields.get("date_added", "date_added"), day_limit) + if "WHERE" in query_part: + query_part = re.sub("WHERE (.*?)(?=$| GROUP BY)", where+" AND (\\1)", query_part) + else: + query_part += where query_parts[i] = query_part query = " UNION ".join(query_parts) if ":params" in query: - query = query.replace(":params", ",".join(["?"] * len(params))) - res = site.storage.query(query + " ORDER BY date_added DESC LIMIT 10", params) - else: - res = site.storage.query(query + " ORDER BY date_added DESC LIMIT 10") - except Exception, err: # Log error - self.log.error("%s feed query %s error: %s" % (address, name, err)) + query_params = map(helper.sqlquote, params) + query = query.replace(":params", ",".join(query_params)) + + res = site.storage.query(query + " ORDER BY 
date_added DESC LIMIT %s" % limit) + + except Exception as err: # Log error + self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err))) + stats.append({"site": site.address, "feed_name": name, "error": str(err)}) continue + for row in res: row = dict(row) + if not isinstance(row["date_added"], (int, long, float, complex)): + self.log.debug("Invalid date_added from site %s: %r" % (address, row["date_added"])) + continue + if row["date_added"] > 1000000000000: # Formatted as millseconds + row["date_added"] = row["date_added"] / 1000 if "date_added" not in row or row["date_added"] > time.time() + 120: + self.log.debug("Newsfeed item from the future from from site %s" % address) continue # Feed item is in the future, skip it row["site"] = address row["feed_name"] = name rows.append(row) - return self.response(to, rows) - - def actionFeedSearch(self, to, search): + stats.append({"site": site.address, "feed_name": name, "taken": round(time.time() - s, 3)}) + time.sleep(0.0001) + return self.response(to, {"rows": rows, "stats": stats, "num": len(rows), "sites": num_sites, "taken": round(time.time() - total_s, 3)}) + + def parseSearch(self, search): + parts = re.split("(site|type):", search) + if len(parts) > 1: # Found filter + search_text = parts[0] + parts = [part.strip() for part in parts] + filters = dict(zip(parts[1::2], parts[2::2])) + else: + search_text = search + filters = {} + return [search_text, filters] + + def actionFeedSearch(self, to, search, limit=30, day_limit=30): if "ADMIN" not in self.site.settings["permissions"]: return self.response(to, "FeedSearch not allowed") from Site import SiteManager rows = [] + stats = [] num_sites = 0 - s = time.time() + total_s = time.time() + + search_text, filters = self.parseSearch(search) + for address, site in SiteManager.site_manager.list().iteritems(): if not site.storage.has_db: continue + if "site" in filters: + if filters["site"].lower() not in [site.address, 
site.content_manager.contents["content.json"].get("title").lower()]: + continue + if site.storage.db: # Database loaded feeds = site.storage.db.schema.get("feeds") else: @@ -84,16 +139,33 @@ def actionFeedSearch(self, to, search): num_sites += 1 for name, query in feeds.iteritems(): + s = time.time() try: db_query = DbQuery(query) - db_query.wheres.append("%s LIKE ? OR %s LIKE ?" % (db_query.fields["body"], db_query.fields["title"])) + + params = [] + # Filters + if search_text: + db_query.wheres.append("(%s LIKE ? OR %s LIKE ?)" % (db_query.fields["body"], db_query.fields["title"])) + search_like = "%" + search_text.replace(" ", "%") + "%" + params.append(search_like) + params.append(search_like) + if filters.get("type") and filters["type"] not in query: + continue + + if day_limit: + db_query.wheres.append( + "%s > strftime('%%s', 'now', '-%s day')" % (db_query.fields.get("date_added", "date_added"), day_limit) + ) + + # Order db_query.parts["ORDER BY"] = "date_added DESC" - db_query.parts["LIMIT"] = "30" + db_query.parts["LIMIT"] = str(limit) - search_like = "%" + search.replace(" ", "%") + "%" - res = site.storage.query(str(db_query), [search_like, search_like]) + res = site.storage.query(str(db_query), params) except Exception, err: - self.log.error("%s feed query %s error: %s" % (address, name, err)) + self.log.error("%s feed query %s error: %s" % (address, name, Debug.formatException(err))) + stats.append({"site": site.address, "feed_name": name, "error": str(err), "query": query}) continue for row in res: row = dict(row) @@ -102,7 +174,8 @@ def actionFeedSearch(self, to, search): row["site"] = address row["feed_name"] = name rows.append(row) - return self.response(to, {"rows": rows, "num": len(rows), "sites": num_sites, "taken": time.time() - s}) + stats.append({"site": site.address, "feed_name": name, "taken": round(time.time() - s, 3)}) + return self.response(to, {"rows": rows, "num": len(rows), "sites": num_sites, "taken": round(time.time() - total_s, 
3), "stats": stats}) @PluginManager.registerTo("User") diff --git a/plugins/OptionalManager/ContentDbPlugin.py b/plugins/OptionalManager/ContentDbPlugin.py index af0681c78..1a1f10afc 100644 --- a/plugins/OptionalManager/ContentDbPlugin.py +++ b/plugins/OptionalManager/ContentDbPlugin.py @@ -8,6 +8,7 @@ from util import helper from Plugin import PluginManager from Config import config +from Debug import Debug if "content_db" not in locals().keys(): # To keep between module reloads content_db = None @@ -48,7 +49,7 @@ def getSchema(self): ], "indexes": [ "CREATE UNIQUE INDEX file_optional_key ON file_optional (site_id, inner_path)", - "CREATE INDEX deletable ON file_optional (peer, is_downloaded) WHERE peer > 10" + "CREATE INDEX is_downloaded ON file_optional (is_downloaded)" ], "schema_changed": 11 } @@ -104,7 +105,7 @@ def loadFilesOptional(self): (num, float(total) / 1024 / 1024, float(total_downloaded) / 1024 / 1024, time.time() - s) ) - if self.need_filling and self.getOptionalLimitBytes() < total_downloaded: + if self.need_filling and self.getOptionalLimitBytes() >= 0 and self.getOptionalLimitBytes() < total_downloaded: limit_bytes = self.getOptionalLimitBytes() limit_new = round((float(total_downloaded) / 1024 / 1024 / 1024) * 1.1, 2) # Current limit + 10% self.log.debug( @@ -132,7 +133,7 @@ def fillTableFileOptional(self, site): content = site.content_manager.contents[row["inner_path"]] try: num += self.setContentFilesOptional(site, row["inner_path"], content, cur=cur) - except Exception, err: + except Exception as err: self.log.error("Error loading %s into file_optional: %s" % (row["inner_path"], err)) cur.execute("COMMIT") cur.close() @@ -157,7 +158,10 @@ def fillTableFileOptional(self, site): def setContentFilesOptional(self, site, content_inner_path, content, cur=None): if not cur: cur = self - cur.execute("BEGIN") + try: + cur.execute("BEGIN") + except Exception as err: + self.log.warning("Transaction begin error %s %s: %s" % (site, content_inner_path, 
Debug.formatException(err))) num = 0 site_id = self.site_ids[site.address] @@ -169,14 +173,13 @@ def setContentFilesOptional(self, site, content_inner_path, content, cur=None): is_downloaded = 1 else: is_downloaded = 0 - if site.address + "/" + file_inner_path in self.my_optional_files: + if site.address + "/" + content_inner_dir in self.my_optional_files: is_pinned = 1 else: is_pinned = 0 cur.insertOrUpdate("file_optional", { "hash_id": hash_id, - "size": int(file["size"]), - "is_pinned": is_pinned + "size": int(file["size"]) }, { "site_id": site_id, "inner_path": file_inner_path @@ -184,20 +187,23 @@ def setContentFilesOptional(self, site, content_inner_path, content, cur=None): "time_added": int(time.time()), "time_downloaded": int(time.time()) if is_downloaded else 0, "is_downloaded": is_downloaded, - "peer": is_downloaded + "peer": is_downloaded, + "is_pinned": is_pinned }) self.optional_files[site_id][file_inner_path[-8:]] = 1 num += 1 if cur == self: - cur.execute("END") - + try: + cur.execute("END") + except Exception as err: + self.log.warning("Transaction end error %s %s: %s" % (site, content_inner_path, Debug.formatException(err))) return num def setContent(self, site, inner_path, content, size=0): super(ContentDbPlugin, self).setContent(site, inner_path, content, size=size) old_content = site.content_manager.contents.get(inner_path, {}) - if (not self.need_filling or self.filled.get(site.address)) and "files_optional" in content or "files_optional" in old_content: + if (not self.need_filling or self.filled.get(site.address)) and ("files_optional" in content or "files_optional" in old_content): self.setContentFilesOptional(site, inner_path, content) # Check deleted files if old_content: @@ -229,6 +235,8 @@ def updatePeerNumbers(self): for site in self.sites.values(): if not site.content_manager.has_optional_files: continue + if not site.settings["serving"]: + continue has_updated_hashfield = next(( peer for peer in site.peers.itervalues() @@ -274,12 
+282,12 @@ def updatePeerNumbers(self): self.log.debug("%s/%s peer number for %s site updated in %.3fs" % (num_updated, num_file, num_site, time.time() - s)) def queryDeletableFiles(self): - # First return the files with atleast 10 seeder and not accessed in last weed + # First return the files with atleast 10 seeder and not accessed in last week query = """ SELECT * FROM file_optional - WHERE peer > 10 AND is_downloaded = 1 AND is_pinned = 0 + WHERE peer > 10 AND %s ORDER BY time_accessed < %s DESC, uploaded / size - """ % int(time.time() - 60 * 60 * 7) + """ % (self.getOptionalUsedWhere(), int(time.time() - 60 * 60 * 7)) limit_start = 0 while 1: num = 0 @@ -296,9 +304,9 @@ def queryDeletableFiles(self): # Then return files less seeder but still not accessed in last week query = """ SELECT * FROM file_optional - WHERE is_downloaded = 1 AND peer <= 10 AND is_pinned = 0 + WHERE peer <= 10 AND %s ORDER BY peer DESC, time_accessed < %s DESC, uploaded / size - """ % int(time.time() - 60 * 60 * 7) + """ % (self.getOptionalUsedWhere(), int(time.time() - 60 * 60 * 7)) limit_start = 0 while 1: num = 0 @@ -315,9 +323,9 @@ def queryDeletableFiles(self): # At the end return all files query = """ SELECT * FROM file_optional - WHERE is_downloaded = 1 AND peer <= 10 AND is_pinned = 0 + WHERE peer <= 10 AND %s ORDER BY peer DESC, time_accessed, uploaded / size - """ + """ % self.getOptionalUsedWhere() limit_start = 0 while 1: num = 0 @@ -337,15 +345,50 @@ def getOptionalLimitBytes(self): limit_bytes = float(re.sub("[^0-9.]", "", config.optional_limit)) * 1024 * 1024 * 1024 return limit_bytes + def getOptionalUsedWhere(self): + maxsize = config.optional_limit_exclude_minsize * 1024 * 1024 + query = "is_downloaded = 1 AND is_pinned = 0 AND size < %s" % maxsize + + # Don't delete optional files from owned sites + my_site_ids = [] + for address, site in self.sites.items(): + if site.settings["own"]: + my_site_ids.append(str(self.site_ids[address])) + + if my_site_ids: + query += " 
AND site_id NOT IN (%s)" % ", ".join(my_site_ids) + return query + + def getOptionalUsedBytes(self): + size = self.execute("SELECT SUM(size) FROM file_optional WHERE %s" % self.getOptionalUsedWhere()).fetchone()[0] + if not size: + size = 0 + return size + + def getOptionalNeedDelete(self, size): + if config.optional_limit.endswith("%"): + limit_percent = float(re.sub("[^0-9.]", "", config.optional_limit)) + need_delete = size - ((helper.getFreeSpace() + size) * (limit_percent / 100)) + else: + need_delete = size - self.getOptionalLimitBytes() + return need_delete + def checkOptionalLimit(self, limit=None): if not limit: limit = self.getOptionalLimitBytes() - size = self.execute("SELECT SUM(size) FROM file_optional WHERE is_downloaded = 1 AND is_pinned = 0").fetchone()[0] - if not size: - size = 0 - need_delete = size - limit - self.log.debug("Optional size: %.1fMB/%.1fMB" % (float(size) / 1024 / 1024, float(limit) / 1024 / 1024)) + if limit < 0: + self.log.debug("Invalid limit for optional files: %s" % limit) + return False + + size = self.getOptionalUsedBytes() + + need_delete = self.getOptionalNeedDelete(size) + + self.log.debug( + "Optional size: %.1fMB/%.1fMB, Need delete: %.1fMB" % + (float(size) / 1024 / 1024, float(limit) / 1024 / 1024, float(need_delete) / 1024 / 1024) + ) if need_delete <= 0: return False @@ -362,10 +405,10 @@ def checkOptionalLimit(self, limit=None): site.log.debug("Deleting %s %.3f MB left" % (row["inner_path"], float(need_delete) / 1024 / 1024)) deleted_file_ids.append(row["file_id"]) try: - site.content_manager.optionalRemove(row["inner_path"], row["hash_id"], row["size"]) + site.content_manager.optionalRemoved(row["inner_path"], row["hash_id"], row["size"]) site.storage.delete(row["inner_path"]) need_delete -= row["size"] - except Exception, err: + except Exception as err: site.log.error("Error deleting %s: %s" % (row["inner_path"], err)) if need_delete <= 0: diff --git a/plugins/OptionalManager/OptionalManagerPlugin.py 
b/plugins/OptionalManager/OptionalManagerPlugin.py index 687c87af5..9da930417 100644 --- a/plugins/OptionalManager/OptionalManagerPlugin.py +++ b/plugins/OptionalManager/OptionalManagerPlugin.py @@ -1,11 +1,21 @@ import time +import re import collections +import gevent + from util import helper from Plugin import PluginManager import ContentDbPlugin +# We can only import plugin host clases after the plugins are loaded +@PluginManager.afterLoad +def importPluginnedClasses(): + global config + from Config import config + + def processAccessLog(): if access_log: content_db = ContentDbPlugin.content_db @@ -44,20 +54,100 @@ def processRequestLog(): helper.timer(60, processRequestLog) +@PluginManager.registerTo("ContentManager") +class ContentManagerPlugin(object): + def __init__(self, *args, **kwargs): + self.cache_is_pinned = {} + super(ContentManagerPlugin, self).__init__(*args, **kwargs) + + def optionalDownloaded(self, inner_path, hash_id, size=None, own=False): + if "|" in inner_path: # Big file piece + file_inner_path, file_range = inner_path.split("|") + else: + file_inner_path = inner_path + + self.contents.db.executeDelayed( + "UPDATE file_optional SET time_downloaded = :now, is_downloaded = 1, peer = peer + 1 WHERE site_id = :site_id AND inner_path = :inner_path AND is_downloaded = 0", + {"now": int(time.time()), "site_id": self.contents.db.site_ids[self.site.address], "inner_path": file_inner_path} + ) + + return super(ContentManagerPlugin, self).optionalDownloaded(inner_path, hash_id, size, own) + + def optionalRemoved(self, inner_path, hash_id, size=None): + self.contents.db.execute( + "UPDATE file_optional SET is_downloaded = 0, is_pinned = 0, peer = peer - 1 WHERE site_id = :site_id AND inner_path = :inner_path AND is_downloaded = 1", + {"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path} + ) + + if self.contents.db.cur.cursor.rowcount > 0: + back = super(ContentManagerPlugin, self).optionalRemoved(inner_path, hash_id, size) 
+ # Re-add to hashfield if we have other file with the same hash_id + if self.isDownloaded(hash_id=hash_id, force_check_db=True): + self.hashfield.appendHashId(hash_id) + return back + + def isDownloaded(self, inner_path=None, hash_id=None, force_check_db=False): + if hash_id and not force_check_db and hash_id not in self.hashfield: + return False + + if inner_path: + res = self.contents.db.execute( + "SELECT is_downloaded FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1", + {"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path} + ) + else: + res = self.contents.db.execute( + "SELECT is_downloaded FROM file_optional WHERE site_id = :site_id AND hash_id = :hash_id AND is_downloaded = 1 LIMIT 1", + {"site_id": self.contents.db.site_ids[self.site.address], "hash_id": hash_id} + ) + row = res.fetchone() + if row and row[0]: + return True + else: + return False + + def isPinned(self, inner_path): + if inner_path in self.cache_is_pinned: + self.site.log.debug("Cached is pinned: %s" % inner_path) + return self.cache_is_pinned[inner_path] + + res = self.contents.db.execute( + "SELECT is_pinned FROM file_optional WHERE site_id = :site_id AND inner_path = :inner_path LIMIT 1", + {"site_id": self.contents.db.site_ids[self.site.address], "inner_path": inner_path} + ) + row = res.fetchone() + + if row and row[0]: + is_pinned = True + else: + is_pinned = False + + self.cache_is_pinned[inner_path] = is_pinned + self.site.log.debug("Cache set is pinned: %s %s" % (inner_path, is_pinned)) + + return is_pinned + + def setPin(self, inner_path, is_pinned): + content_db = self.contents.db + site_id = content_db.site_ids[self.site.address] + content_db.execute("UPDATE file_optional SET is_pinned = %d WHERE ?" 
% is_pinned, {"site_id": site_id, "inner_path": inner_path}) + self.cache_is_pinned = {} + + def optionalDelete(self, inner_path): + if self.isPinned(inner_path): + self.site.log.debug("Skip deleting pinned optional file: %s" % inner_path) + return False + else: + return super(ContentManagerPlugin, self).optionalDelete(inner_path) + + @PluginManager.registerTo("WorkerManager") class WorkerManagerPlugin(object): def doneTask(self, task): - if task["optional_hash_id"]: - content_db = self.site.content_manager.contents.db - content_db.executeDelayed( - "UPDATE file_optional SET time_downloaded = :now, is_downloaded = 1, peer = peer + 1 WHERE site_id = :site_id AND inner_path = :inner_path", - {"now": int(time.time()), "site_id": content_db.site_ids[self.site.address], "inner_path": task["inner_path"]} - ) - super(WorkerManagerPlugin, self).doneTask(task) - if task["optional_hash_id"] and not self.tasks: - content_db.processDelayed() + if task["optional_hash_id"] and not self.tasks: # Execute delayed queries immedietly after tasks finished + ContentDbPlugin.content_db.processDelayed() @PluginManager.registerTo("UiRequest") @@ -107,11 +197,33 @@ def isDownloadable(self, inner_path): return False + def fileForgot(self, inner_path): + if "|" in inner_path and self.content_manager.isPinned(re.sub(r"\|.*", "", inner_path)): + self.log.debug("File %s is pinned, no fileForgot" % inner_path) + return False + else: + return super(SitePlugin, self).fileForgot(inner_path) + + def fileDone(self, inner_path): + if "|" in inner_path and self.bad_files.get(inner_path, 0) > 5: # Idle optional file done + inner_path_file = re.sub(r"\|.*", "", inner_path) + num_changed = 0 + for key, val in self.bad_files.items(): + if key.startswith(inner_path_file) and val > 1: + self.bad_files[key] = 1 + num_changed += 1 + self.log.debug("Idle optional file piece done, changed retry number of %s pieces." 
% num_changed) + if num_changed: + gevent.spawn(self.retryBadFiles) + + return super(SitePlugin, self).fileDone(inner_path) + @PluginManager.registerTo("ConfigPlugin") class ConfigPlugin(object): def createArguments(self): group = self.parser.add_argument_group("OptionalManager plugin") group.add_argument('--optional_limit', help='Limit total size of optional files', default="10%", metavar="GB or free space %") + group.add_argument('--optional_limit_exclude_minsize', help='Exclude files larger than this limit from optional size limit calculation', default=20, metavar="MB", type=int) return super(ConfigPlugin, self).createArguments() diff --git a/plugins/OptionalManager/Test/TestOptionalManager.py b/plugins/OptionalManager/Test/TestOptionalManager.py index a61aa0780..00a5fcb7d 100644 --- a/plugins/OptionalManager/Test/TestOptionalManager.py +++ b/plugins/OptionalManager/Test/TestOptionalManager.py @@ -1,11 +1,14 @@ import hashlib import os import copy +import json +from cStringIO import StringIO import pytest from OptionalManager import OptionalManagerPlugin from util import helper +from Crypt import CryptBitcoin @pytest.mark.usefixtures("resetSettings") @@ -40,3 +43,106 @@ def testDeleteContent(self, site): num_optional_files_before = contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] del contents["content.json"] assert contents.db.execute("SELECT COUNT(*) FROM file_optional").fetchone()[0] < num_optional_files_before + + def testVerifyFiles(self, site): + contents = site.content_manager.contents + + # Add new file + new_content = copy.deepcopy(contents["content.json"]) + new_content["files_optional"]["testfile"] = { + "size": 1234, + "sha512": "aaaabbbbcccc" + } + contents["content.json"] = new_content + file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone() + assert not file_row["is_downloaded"] + + # Write file from outside of ZeroNet + site.storage.open("testfile", "wb").write("A" * 1234) # 
For quick check hash does not matter only file size + + hashfield_len_before = len(site.content_manager.hashfield) + site.storage.verifyFiles(quick_check=True) + assert len(site.content_manager.hashfield) == hashfield_len_before + 1 + + file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone() + assert file_row["is_downloaded"] + + # Delete file outside of ZeroNet + site.storage.delete("testfile") + site.storage.verifyFiles(quick_check=True) + file_row = contents.db.execute("SELECT * FROM file_optional WHERE inner_path = 'testfile'").fetchone() + assert not file_row["is_downloaded"] + + def testVerifyFilesSameHashId(self, site): + contents = site.content_manager.contents + + new_content = copy.deepcopy(contents["content.json"]) + + # Add two files with same hashid (first 4 character) + new_content["files_optional"]["testfile1"] = { + "size": 1234, + "sha512": "aaaabbbbcccc" + } + new_content["files_optional"]["testfile2"] = { + "size": 2345, + "sha512": "aaaabbbbdddd" + } + contents["content.json"] = new_content + + assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") == site.content_manager.hashfield.getHashId("aaaabbbbdddd") + + # Write files from outside of ZeroNet (For quick check hash does not matter only file size) + site.storage.open("testfile1", "wb").write("A" * 1234) + site.storage.open("testfile2", "wb").write("B" * 2345) + + site.storage.verifyFiles(quick_check=True) + + # Make sure that both is downloaded + assert site.content_manager.isDownloaded("testfile1") + assert site.content_manager.isDownloaded("testfile2") + assert site.content_manager.hashfield.getHashId("aaaabbbbcccc") in site.content_manager.hashfield + + # Delete one of the files + site.storage.delete("testfile1") + site.storage.verifyFiles(quick_check=True) + assert not site.content_manager.isDownloaded("testfile1") + assert site.content_manager.isDownloaded("testfile2") + assert 
site.content_manager.hashfield.getHashId("aaaabbbbdddd") in site.content_manager.hashfield + + def testIsPinned(self, site): + assert not site.content_manager.isPinned("data/img/zerotalk-upvote.png") + site.content_manager.setPin("data/img/zerotalk-upvote.png", True) + assert site.content_manager.isPinned("data/img/zerotalk-upvote.png") + + assert len(site.content_manager.cache_is_pinned) == 1 + site.content_manager.cache_is_pinned = {} + assert site.content_manager.isPinned("data/img/zerotalk-upvote.png") + + def testBigfilePieceReset(self, site): + site.bad_files = { + "data/fake_bigfile.mp4|0-1024": 10, + "data/fake_bigfile.mp4|1024-2048": 10, + "data/fake_bigfile.mp4|2048-3064": 10 + } + site.onFileDone("data/fake_bigfile.mp4|0-1024") + assert site.bad_files["data/fake_bigfile.mp4|1024-2048"] == 1 + assert site.bad_files["data/fake_bigfile.mp4|2048-3064"] == 1 + + def testOptionalDelete(self, site): + privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" + contents = site.content_manager.contents + + site.content_manager.setPin("data/img/zerotalk-upvote.png", True) + site.content_manager.setPin("data/img/zeroid.png", False) + new_content = copy.deepcopy(contents["content.json"]) + del new_content["files_optional"]["data/img/zerotalk-upvote.png"] + del new_content["files_optional"]["data/img/zeroid.png"] + + assert site.storage.isFile("data/img/zerotalk-upvote.png") + assert site.storage.isFile("data/img/zeroid.png") + + site.storage.writeJson("content.json", new_content) + site.content_manager.loadContent("content.json", force=True) + + assert not site.storage.isFile("data/img/zeroid.png") + assert site.storage.isFile("data/img/zerotalk-upvote.png") diff --git a/plugins/OptionalManager/UiWebsocketPlugin.py b/plugins/OptionalManager/UiWebsocketPlugin.py index ddce352b2..879fb0add 100644 --- a/plugins/OptionalManager/UiWebsocketPlugin.py +++ b/plugins/OptionalManager/UiWebsocketPlugin.py @@ -7,6 +7,12 @@ from Plugin import PluginManager from Config 
import config from util import helper +from Translate import Translate + +if "_" not in locals(): + _ = Translate("plugins/OptionalManager/languages/") + +bigfile_sha512_cache = {} @PluginManager.registerTo("UiWebsocket") @@ -15,12 +21,11 @@ def __init__(self, *args, **kwargs): self.time_peer_numbers_updated = 0 super(UiWebsocketPlugin, self).__init__(*args, **kwargs) - def actionFileWrite(self, to, inner_path, *args, **kwargs): - super(UiWebsocketPlugin, self).actionFileWrite(to, inner_path, *args, **kwargs) - + def actionSiteSign(self, to, privatekey=None, inner_path="content.json", *args, **kwargs): # Add file to content.db and set it as pinned content_db = self.site.content_manager.contents.db - content_db.my_optional_files[self.site.address + "/" + inner_path] = time.time() + content_inner_dir = helper.getDirname(inner_path) + content_db.my_optional_files[self.site.address + "/" + content_inner_dir] = time.time() if len(content_db.my_optional_files) > 50: # Keep only last 50 oldest_key = min( content_db.my_optional_files.iterkeys(), @@ -28,14 +33,76 @@ def actionFileWrite(self, to, inner_path, *args, **kwargs): ) del content_db.my_optional_files[oldest_key] + return super(UiWebsocketPlugin, self).actionSiteSign(to, privatekey, inner_path, *args, **kwargs) + def updatePeerNumbers(self): + self.site.updateHashfield() content_db = self.site.content_manager.contents.db content_db.updatePeerNumbers() self.site.updateWebsocket(peernumber_updated=True) + def addBigfileInfo(self, row): + global bigfile_sha512_cache + + content_db = self.site.content_manager.contents.db + site = content_db.sites[row["address"]] + if not site.settings.get("has_bigfile"): + return False + + file_key = row["address"] + "/" + row["inner_path"] + sha512 = bigfile_sha512_cache.get(file_key) + file_info = None + if not sha512: + file_info = site.content_manager.getFileInfo(row["inner_path"]) + if not file_info or not file_info.get("piece_size"): + return False + sha512 = file_info["sha512"] + 
bigfile_sha512_cache[file_key] = sha512 + + if sha512 in site.storage.piecefields: + piecefield = site.storage.piecefields[sha512].tostring() + else: + piecefield = None + + if piecefield: + row["pieces"] = len(piecefield) + row["pieces_downloaded"] = piecefield.count("1") + row["downloaded_percent"] = 100 * row["pieces_downloaded"] / row["pieces"] + if row["pieces_downloaded"]: + if not file_info: + file_info = site.content_manager.getFileInfo(row["inner_path"]) + row["bytes_downloaded"] = row["pieces_downloaded"] * file_info.get("piece_size", 0) + else: + row["bytes_downloaded"] = 0 + + row["is_downloading"] = bool(next((inner_path for inner_path in site.bad_files if inner_path.startswith(row["inner_path"])), False)) + + # Add leech / seed stats + row["peer_seed"] = 0 + row["peer_leech"] = 0 + for peer in site.peers.itervalues(): + if not peer.time_piecefields_updated or sha512 not in peer.piecefields: + continue + peer_piecefield = peer.piecefields[sha512].tostring() + if not peer_piecefield: + continue + if peer_piecefield == "1" * len(peer_piecefield): + row["peer_seed"] += 1 + else: + row["peer_leech"] += 1 + + # Add myself + if piecefield: + if row["pieces_downloaded"] == row["pieces"]: + row["peer_seed"] += 1 + else: + row["peer_leech"] += 1 + + return True + # Optional file functions - def actionOptionalFileList(self, to, address=None, orderby="time_downloaded DESC", limit=10): + def actionOptionalFileList(self, to, address=None, orderby="time_downloaded DESC", limit=10, filter="downloaded"): if not address: address = self.site.address @@ -46,8 +113,11 @@ def actionOptionalFileList(self, to, address=None, orderby="time_downloaded DESC self.time_peer_numbers_updated = time.time() gevent.spawn(self.updatePeerNumbers) - if address != self.site.address and "ADMIN" not in self.site.settings["permissions"]: - return self.response(to, "optionalSiteInfo not allowed on this site") + if address == "all" and "ADMIN" not in self.permissions: + return self.response(to, 
{"error": "Forbidden"}) + + if not self.hasSitePermission(address): + return self.response(to, {"error": "Forbidden"}) if not all([re.match("^[a-z_*/+-]+( DESC| ASC|)$", part.strip()) for part in orderby.split(",")]): return self.response(to, "Invalid order_by") @@ -57,10 +127,52 @@ def actionOptionalFileList(self, to, address=None, orderby="time_downloaded DESC back = [] content_db = self.site.content_manager.contents.db - site_id = content_db.site_ids[address] - query = "SELECT * FROM file_optional WHERE site_id = %s AND is_downloaded = 1 ORDER BY %s LIMIT %s" % (site_id, orderby, limit) - for row in content_db.execute(query): - back.append(dict(row)) + + wheres = {} + wheres_raw = [] + if "bigfile" in filter: + wheres["size >"] = 1024 * 1024 * 10 + + if "not_downloaded" in filter: + wheres["is_downloaded"] = 0 + elif "downloaded" in filter: + wheres_raw.append("(is_downloaded = 1 OR is_pinned = 1)") + + if "pinned" in filter: + wheres["is_pinned"] = 1 + + if address == "all": + join = "LEFT JOIN site USING (site_id)" + else: + wheres["site_id"] = content_db.site_ids[address] + join = "" + + if wheres_raw: + query_wheres_raw = "AND" + " AND ".join(wheres_raw) + else: + query_wheres_raw = "" + + query = "SELECT * FROM file_optional %s WHERE ? 
%s ORDER BY %s LIMIT %s" % (join, query_wheres_raw, orderby, limit) + + for row in content_db.execute(query, wheres): + row = dict(row) + if address != "all": + row["address"] = address + + if row["size"] > 1024 * 1024: + has_info = self.addBigfileInfo(row) + else: + has_info = False + + if not has_info: + if row["is_downloaded"]: + row["bytes_downloaded"] = row["size"] + row["downloaded_percent"] = 100 + else: + row["bytes_downloaded"] = 0 + row["downloaded_percent"] = 0 + + back.append(row) self.response(to, back) def actionOptionalFileInfo(self, to, inner_path): @@ -77,7 +189,11 @@ def actionOptionalFileInfo(self, to, inner_path): res = content_db.execute(query, {"site_id": site_id, "inner_path": inner_path}) row = next(res, None) if row: - self.response(to, dict(row)) + row = dict(row) + if row["size"] > 1024 * 1024: + row["address"] = self.site.address + self.addBigfileInfo(row) + self.response(to, row) else: self.response(to, None) @@ -89,23 +205,32 @@ def setPin(self, inner_path, is_pinned, address=None): return {"error": "Forbidden"} site = self.server.sites[address] - - content_db = site.content_manager.contents.db - site_id = content_db.site_ids[site.address] - content_db.execute("UPDATE file_optional SET is_pinned = %s WHERE ?" 
% is_pinned, {"site_id": site_id, "inner_path": inner_path}) + site.content_manager.setPin(inner_path, is_pinned) return "ok" def actionOptionalFilePin(self, to, inner_path, address=None): + if type(inner_path) is not list: + inner_path = [inner_path] back = self.setPin(inner_path, 1, address) + num_file = len(inner_path) if back == "ok": - self.cmd("notification", ["done", "Pinned %s files" % len(inner_path) if type(inner_path) is list else 1, 5000]) + if num_file == 1: + self.cmd("notification", ["done", _["Pinned %s"] % cgi.escape(helper.getFilename(inner_path[0])), 5000]) + else: + self.cmd("notification", ["done", _["Pinned %s files"] % num_file, 5000]) self.response(to, back) def actionOptionalFileUnpin(self, to, inner_path, address=None): + if type(inner_path) is not list: + inner_path = [inner_path] back = self.setPin(inner_path, 0, address) + num_file = len(inner_path) if back == "ok": - self.cmd("notification", ["done", "Removed pin from %s files" % len(inner_path) if type(inner_path) is list else 1, 5000]) + if num_file == 1: + self.cmd("notification", ["done", _["Removed pin from %s"] % cgi.escape(helper.getFilename(inner_path[0])), 5000]) + else: + self.cmd("notification", ["done", _["Removed pin from %s files"] % num_file, 5000]) self.response(to, back) def actionOptionalFileDelete(self, to, inner_path, address=None): @@ -120,13 +245,13 @@ def actionOptionalFileDelete(self, to, inner_path, address=None): content_db = site.content_manager.contents.db site_id = content_db.site_ids[site.address] - res = content_db.execute("SELECT * FROM file_optional WHERE ? LIMIT 1", {"site_id": site_id, "inner_path": inner_path}) + res = content_db.execute("SELECT * FROM file_optional WHERE ? 
LIMIT 1", {"site_id": site_id, "inner_path": inner_path, "is_downloaded": 1}) row = next(res, None) if not row: return self.response(to, {"error": "Not found in content.db"}) - removed = site.content_manager.optionalRemove(inner_path, row["hash_id"], row["size"]) + removed = site.content_manager.optionalRemoved(inner_path, row["hash_id"], row["size"]) # if not removed: # return self.response(to, {"error": "Not found in hash_id: %s" % row["hash_id"]}) @@ -134,11 +259,14 @@ def actionOptionalFileDelete(self, to, inner_path, address=None): try: site.storage.delete(inner_path) - except Exception, err: + except Exception as err: return self.response(to, {"error": "File delete error: %s" % err}) + site.updateWebsocket(file_delete=inner_path) - self.response(to, "ok") + if inner_path in site.content_manager.cache_is_pinned: + site.content_manager.cache_is_pinned = {} + self.response(to, "ok") # Limit functions @@ -148,9 +276,7 @@ def actionOptionalLimitStats(self, to): back = {} back["limit"] = config.optional_limit - back["used"] = self.site.content_manager.contents.db.execute( - "SELECT SUM(size) FROM file_optional WHERE is_downloaded = 1 AND is_pinned = 0" - ).fetchone()[0] + back["used"] = self.site.content_manager.contents.db.getOptionalUsedBytes() back["free"] = helper.getFreeSpace() self.response(to, back) @@ -202,7 +328,7 @@ def actionOptionalHelp(self, to, directory, title, address=None): self.cmd("notification", [ "done", - "You started to help distribute %s.
Directory: %s" % + _["You started to help distribute %s.
Directory: %s"] % (cgi.escape(title), cgi.escape(directory)), 10000 ]) @@ -247,8 +373,8 @@ def actionOptionalHelpAll(self, to, value, address=None): self.cmd( "confirm", [ - "Help distribute all new optional files on site %s" % cgi.escape(site_title), - "Yes, I want to help!" + _["Help distribute all new optional files on site %s"] % cgi.escape(site_title), + _["Yes, I want to help!"] ], lambda (res): self.cbOptionalHelpAll(to, site, True) ) diff --git a/plugins/OptionalManager/languages/es.json b/plugins/OptionalManager/languages/es.json new file mode 100644 index 000000000..32ae46aef --- /dev/null +++ b/plugins/OptionalManager/languages/es.json @@ -0,0 +1,7 @@ +{ + "Pinned %s files": "Archivos %s fijados", + "Removed pin from %s files": "Archivos %s que no estan fijados", + "You started to help distribute %s.
Directory: %s": "Tu empezaste a ayudar a distribuir %s.
Directorio: %s", + "Help distribute all new optional files on site %s": "Ayude a distribuir todos los archivos opcionales en el sitio %s", + "Yes, I want to help!": "¡Si, yo quiero ayudar!" +} diff --git a/plugins/OptionalManager/languages/fr.json b/plugins/OptionalManager/languages/fr.json new file mode 100644 index 000000000..47a563dc9 --- /dev/null +++ b/plugins/OptionalManager/languages/fr.json @@ -0,0 +1,7 @@ +{ + "Pinned %s files": "Fichiers %s épinglés", + "Removed pin from %s files": "Fichiers %s ne sont plus épinglés", + "You started to help distribute %s.
Directory: %s": "Vous avez commencé à aider à distribuer %s.
Dossier : %s", + "Help distribute all new optional files on site %s": "Aider à distribuer tous les fichiers optionnels du site %s", + "Yes, I want to help!": "Oui, je veux aider !" +} diff --git a/plugins/OptionalManager/languages/hu.json b/plugins/OptionalManager/languages/hu.json new file mode 100644 index 000000000..7a23b86c1 --- /dev/null +++ b/plugins/OptionalManager/languages/hu.json @@ -0,0 +1,7 @@ +{ + "Pinned %s files": "%s fájl rögzítve", + "Removed pin from %s files": "%s fájl rögzítés eltávolítva", + "You started to help distribute %s.
Directory: %s": "Új segítség a terjesztésben: %s.
Könyvtár: %s", + "Help distribute all new optional files on site %s": "Segítség az összes új opcionális fájl terjesztésében az %s oldalon", + "Yes, I want to help!": "Igen, segíteni akarok!" +} diff --git a/plugins/OptionalManager/languages/pt-br.json b/plugins/OptionalManager/languages/pt-br.json new file mode 100644 index 000000000..21d90cc0a --- /dev/null +++ b/plugins/OptionalManager/languages/pt-br.json @@ -0,0 +1,7 @@ +{ + "Pinned %s files": "Arquivos %s fixados", + "Removed pin from %s files": "Arquivos %s não estão fixados", + "You started to help distribute %s.
Directory: %s": "Você começou a ajudar a distribuir %s.
Pasta: %s", + "Help distribute all new optional files on site %s": "Ajude a distribuir todos os novos arquivos opcionais no site %s", + "Yes, I want to help!": "Sim, eu quero ajudar!" +} diff --git a/plugins/OptionalManager/languages/zh-tw.json b/plugins/OptionalManager/languages/zh-tw.json new file mode 100644 index 000000000..dfa9eaf36 --- /dev/null +++ b/plugins/OptionalManager/languages/zh-tw.json @@ -0,0 +1,7 @@ +{ + "Pinned %s files": "已固定 %s 個檔", + "Removed pin from %s files": "已解除固定 %s 個檔", + "You started to help distribute %s.
Directory: %s": "你已經開始幫助分發 %s
目錄:%s", + "Help distribute all new optional files on site %s": "你想要幫助分發 %s 網站的所有檔嗎?", + "Yes, I want to help!": "是,我想要幫助!" +} diff --git a/plugins/OptionalManager/languages/zh.json b/plugins/OptionalManager/languages/zh.json new file mode 100644 index 000000000..ae18118e2 --- /dev/null +++ b/plugins/OptionalManager/languages/zh.json @@ -0,0 +1,7 @@ +{ + "Pinned %s files": "已固定 %s 个文件", + "Removed pin from %s files": "已解除固定 %s 个文件", + "You started to help distribute %s.
Directory: %s": "您已经开始帮助分发 %s
目录:%s", + "Help distribute all new optional files on site %s": "您想要帮助分发 %s 站点的所有文件吗?", + "Yes, I want to help!": "是,我想要帮助!" +} diff --git a/plugins/PeerDb/PeerDbPlugin.py b/plugins/PeerDb/PeerDbPlugin.py index 87a8eb851..241b5c587 100644 --- a/plugins/PeerDb/PeerDbPlugin.py +++ b/plugins/PeerDb/PeerDbPlugin.py @@ -22,12 +22,14 @@ def getSchema(self): ["address", "TEXT NOT NULL"], ["port", "INTEGER NOT NULL"], ["hashfield", "BLOB"], - ["time_added", "INTEGER NOT NULL"] + ["reputation", "INTEGER NOT NULL"], + ["time_added", "INTEGER NOT NULL"], + ["time_found", "INTEGER NOT NULL"] ], "indexes": [ "CREATE UNIQUE INDEX peer_key ON peer (site_id, address, port)" ], - "schema_changed": 1 + "schema_changed": 2 } return schema @@ -39,40 +41,53 @@ def loadPeers(self, site): num = 0 num_hashfield = 0 for row in res: - peer = site.addPeer(row["address"], row["port"]) + peer = site.addPeer(str(row["address"]), row["port"]) if not peer: # Already exist continue if row["hashfield"]: peer.hashfield.replaceFromString(row["hashfield"]) num_hashfield += 1 peer.time_added = row["time_added"] + peer.time_found = row["time_found"] + peer.reputation = row["reputation"] + if row["address"].endswith(".onion"): + peer.reputation = peer.reputation / 2 - 1 # Onion peers less likely working num += 1 + if num_hashfield: + site.content_manager.has_optional_files = True site.log.debug("%s peers (%s with hashfield) loaded in %.3fs" % (num, num_hashfield, time.time() - s)) def iteratePeers(self, site): site_id = self.site_ids.get(site.address) for key, peer in site.peers.iteritems(): - address, port = key.split(":") + address, port = key.rsplit(":", 1) if peer.has_hashfield: hashfield = sqlite3.Binary(peer.hashfield.tostring()) else: hashfield = "" - yield (site_id, address, port, hashfield, int(peer.time_added)) + yield (site_id, address, port, hashfield, peer.reputation, int(peer.time_added), int(peer.time_found)) def savePeers(self, site, spawn=False): if spawn: # Save peers every hour (+random 
some secs to not update very site at same time) gevent.spawn_later(60 * 60 + random.randint(0, 60), self.savePeers, site, spawn=True) + if not site.peers: + site.log.debug("Peers not saved: No peers found") + return s = time.time() site_id = self.site_ids.get(site.address) cur = self.getCursor() cur.execute("BEGIN") - self.execute("DELETE FROM peer WHERE site_id = :site_id", {"site_id": site_id}) - self.cur.cursor.executemany( - "INSERT INTO peer (site_id, address, port, hashfield, time_added) VALUES (?, ?, ?, ?, ?)", - self.iteratePeers(site) - ) - cur.execute("END") + try: + cur.execute("DELETE FROM peer WHERE site_id = :site_id", {"site_id": site_id}) + cur.cursor.executemany( + "INSERT INTO peer (site_id, address, port, hashfield, reputation, time_added, time_found) VALUES (?, ?, ?, ?, ?, ?, ?)", + self.iteratePeers(site) + ) + except Exception as err: + site.log.error("Save peer error: %s" % err) + finally: + cur.execute("END") site.log.debug("Peers saved in %.3fs" % (time.time() - s)) def initSite(self, site): diff --git a/plugins/Sidebar/SidebarPlugin.py b/plugins/Sidebar/SidebarPlugin.py index ca9e60855..c56a2cb4e 100644 --- a/plugins/Sidebar/SidebarPlugin.py +++ b/plugins/Sidebar/SidebarPlugin.py @@ -15,13 +15,17 @@ from Config import config from Plugin import PluginManager from Debug import Debug +from Translate import Translate from util import helper +from ZipStream import ZipStream plugin_dir = "plugins/Sidebar" media_dir = plugin_dir + "/media" sys.path.append(plugin_dir) # To able to load geoip lib loc_cache = {} +if "_" not in locals(): + _ = Translate(plugin_dir + "/languages/") @PluginManager.registerTo("UiRequest") @@ -41,8 +45,11 @@ def actionUiMedia(self, path): # If debugging merge *.css to all.css and *.js to all.js from Debug import DebugMedia DebugMedia.merge(plugin_media_file) - for part in self.actionFile(plugin_media_file, send_header=False): - yield part + if ext == "js": + yield _.translateData(open(plugin_media_file).read()) + else: + 
for part in self.actionFile(plugin_media_file, send_header=False): + yield part elif path.startswith("/uimedia/globe/"): # Serve WebGL globe files file_name = re.match(".*/(.*)", path).group(1) plugin_media_file = "%s-globe/%s" % (media_dir, file_name) @@ -56,38 +63,86 @@ def actionUiMedia(self, path): for part in super(UiRequestPlugin, self).actionUiMedia(path): yield part + def actionZip(self): + address = self.get["address"] + site = self.server.site_manager.get(address) + if not site: + return self.error404("Site not found") + + title = site.content_manager.contents.get("content.json", {}).get("title", "").encode('ascii', 'ignore') + filename = "%s-backup-%s.zip" % (title, time.strftime("%Y-%m-%d_%H_%M")) + self.sendHeader(content_type="application/zip", extra_headers={'Content-Disposition': 'attachment; filename="%s"' % filename}) + + return self.streamZip(site.storage.getPath(".")) + + def streamZip(self, file_path): + zs = ZipStream(file_path) + while 1: + data = zs.read() + if not data: + break + yield data + + + @PluginManager.registerTo("UiWebsocket") class UiWebsocketPlugin(object): - def sidebarRenderPeerStats(self, body, site): connected = len([peer for peer in site.peers.values() if peer.connection and peer.connection.connected]) connectable = len([peer_id for peer_id in site.peers.keys() if not peer_id.endswith(":0")]) onion = len([peer_id for peer_id in site.peers.keys() if ".onion" in peer_id]) + local = len([peer for peer in site.peers.values() if helper.isPrivateIp(peer.ip)]) peers_total = len(site.peers) + + # Add myself + if site.settings["serving"]: + peers_total += 1 + if any(site.connection_server.port_opened.values()): + connectable += 1 + if site.connection_server.tor_manager.start_onions: + onion += 1 + if peers_total: percent_connected = float(connected) / peers_total percent_connectable = float(connectable) / peers_total percent_onion = float(onion) / peers_total else: percent_connectable = percent_connected = percent_onion = 0 - 
body.append(""" + + if local: + local_html = _(u"
  • {_[Local]}:{local}
  • ") + else: + local_html = "" + + peer_ips = [peer.key for peer in site.getConnectablePeers(20, allow_private=False)] + peer_ips.sort(key=lambda peer_ip: ".onion:" in peer_ip) + copy_link = "http://127.0.0.1:43110/%s/?zeronet_peers=%s" % ( + site.content_manager.contents["content.json"].get("domain", site.address), + ",".join(peer_ips) + ) + + body.append(_(u"""
  • - +
      -
    • -
    • -
    • -
    • +
    • +
    • +
    • +
      -
    • connected:{connected}
    • -
    • Connectable:{connectable}
    • -
    • Onion:{onion}
    • -
    • Total:{peers_total}
    • +
    • {_[Connected]}:{connected}
    • +
    • {_[Connectable]}:{connectable}
    • +
    • {_[Onion]}:{onion}
    • + {local_html} +
    • {_[Total]}:{peers_total}
  • - """.format(**locals())) + """.replace("{local_html}", local_html))) def sidebarRenderTransferStats(self, body, site): recv = float(site.settings.get("bytes_recv", 0)) / 1024 / 1024 @@ -99,32 +154,41 @@ def sidebarRenderTransferStats(self, body, site): else: percent_recv = 0.5 percent_sent = 0.5 - body.append(""" + + body.append(_(u"""
  • - +
      -
    • -
    • +
    • +
      -
    • Received:{recv:.2f}MB
    • -
    • Sent:{sent:.2f}MB
    • +
    • {_[Received]}:{recv:.2f}MB
    • +
    • {_[Sent]}:{sent:.2f}MB
  • - """.format(**locals())) + """)) def sidebarRenderFileStats(self, body, site): - body.append("
    • ") + body.append(_(u""" +
    • + +
        + """)) extensions = ( ("html", "yellow"), ("css", "orange"), ("js", "purple"), - ("image", "green"), + ("Image", "green"), ("json", "darkblue"), - ("user data", "blue"), - ("other", "white"), - ("total", "black") + ("User data", "blue"), + ("Other", "white"), + ("Total", "black") ) # Collect stats size_filetypes = {} @@ -132,7 +196,7 @@ def sidebarRenderFileStats(self, body, site): contents = site.content_manager.listContents() # Without user files for inner_path in contents: content = site.content_manager.contents[inner_path] - if "files" not in content: + if "files" not in content or content["files"] is None: continue for file_name, file_details in content["files"].items(): size_total += file_details["size"] @@ -146,7 +210,7 @@ def sidebarRenderFileStats(self, body, site): ).fetchone()["size"] if not size_user_content: size_user_content = 0 - size_filetypes["user data"] = size_user_content + size_filetypes["User data"] = size_user_content size_total += size_user_content # The missing difference is content.json sizes @@ -156,11 +220,11 @@ def sidebarRenderFileStats(self, body, site): # Bar for extension, color in extensions: - if extension == "total": + if extension == "Total": continue - if extension == "other": + if extension == "Other": size = max(0, size_other) - elif extension == "image": + elif extension == "Image": size = size_filetypes.get("jpg", 0) + size_filetypes.get("png", 0) + size_filetypes.get("gif", 0) size_other -= size else: @@ -173,17 +237,17 @@ def sidebarRenderFileStats(self, body, site): percent = math.floor(percent * 100) / 100 # Floor to 2 digits body.append( u"""
      • """ % - (percent, extension, color, extension) + (percent, _[extension], color, _[extension]) ) # Legend body.append("
        ") for extension, color in extensions: - if extension == "other": + if extension == "Other": size = max(0, size_other) - elif extension == "image": + elif extension == "Image": size = size_filetypes.get("jpg", 0) + size_filetypes.get("png", 0) + size_filetypes.get("gif", 0) - elif extension == "total": + elif extension == "Total": size = size_total else: size = size_filetypes.get(extension, 0) @@ -198,7 +262,7 @@ def sidebarRenderFileStats(self, body, site): else: size_formatted = "%.0fkB" % (size / 1024) - body.append(u"
      • %s:%s
      • " % (color, title, size_formatted)) + body.append(u"
      • %s:%s
      • " % (color, _[title], size_formatted)) body.append("
    • ") @@ -207,13 +271,14 @@ def sidebarRenderSizeLimit(self, body, site): size = float(site.settings["size"]) / 1024 / 1024 size_limit = site.getSizeLimit() percent_used = size / size_limit - body.append(""" + + body.append(_(u"""
    • - + MB - Set + {_[Set]}
    • - """.format(**locals())) + """)) def sidebarRenderOptionalFileStats(self, body, site): size_total = float(site.settings["size_optional"]) @@ -227,19 +292,19 @@ def sidebarRenderOptionalFileStats(self, body, site): size_formatted_total = size_total / 1024 / 1024 size_formatted_downloaded = size_downloaded / 1024 / 1024 - body.append(""" + body.append(_(u"""
    • - +
        -
      • -
      • +
      • +
        -
      • Downloaded:{size_formatted_downloaded:.2f}MB
      • -
      • Total:{size_formatted_total:.2f}MB
      • +
      • {_[Downloaded]}:{size_formatted_downloaded:.2f}MB
      • +
      • {_[Total]}:{size_formatted_total:.2f}MB
    • - """.format(**locals())) + """)) return True @@ -248,31 +313,44 @@ def sidebarRenderOptionalFileSettings(self, body, site): checked = "checked='checked'" else: checked = "" - body.append(""" + + body.append(_(u"""
    • - +
      -
    • - """.format(**locals())) + """)) + + autodownload_bigfile_size_limit = int(site.settings.get("autodownload_bigfile_size_limit", config.autodownload_bigfile_size_limit)) + body.append(_(u""" +
      + + MB + {_[Set]} +
      + """)) + body.append("") def sidebarRenderBadFiles(self, body, site): - body.append(""" + body.append(_(u"""
    • - +
        - """) + """)) i = 0 for bad_file, tries in site.bad_files.iteritems(): i += 1 - body.append("""
      • %s
      • """ % ( - cgi.escape(bad_file, True), tries, cgi.escape(bad_file, True)) - ) + body.append(_(u"""
      • {bad_filename}
      • """, { + "bad_file_path": bad_file, + "bad_filename": helper.getFilename(bad_file), + "tries": _.pluralize(tries, "{} try", "{} tries") + })) if i > 30: break if len(site.bad_files) > 30: - body.append("""
      • + %s more
      • """ % (len(site.bad_files) - 30)) + num_bad_files = len(site.bad_files) - 30 + body.append(_(u"""
      • {_[+ {num_bad_files} more]}
      • """, nested=True)) body.append("""
      @@ -285,21 +363,23 @@ def sidebarRenderDbOptions(self, body, site): size = float(site.storage.getSize(inner_path)) / 1024 feeds = len(site.storage.db.schema.get("feeds", {})) else: - inner_path = "No database found" + inner_path = _[u"No database found"] size = 0.0 feeds = 0 - body.append(u""" + body.append(_(u"""
    • - - - Reload - Rebuild + +
    • - """.format(**locals())) + """, nested=True)) def sidebarRenderIdentity(self, body, site): - auth_address = self.user.getAuthAddress(self.site.address) + auth_address = self.user.getAuthAddress(self.site.address, create=False) rules = self.site.content_manager.getRules("data/users/%s/content.json" % auth_address) if rules and rules.get("max_size"): quota = rules["max_size"] / 1024 @@ -312,16 +392,18 @@ def sidebarRenderIdentity(self, body, site): else: quota = used = 0 - body.append(""" + body.append(_(u"""
    • - - {auth_address} - Change + +
      + {auth_address} + {_[Change]} +
    • - """.format(**locals())) + """)) def sidebarRenderControls(self, body, site): - auth_address = self.user.getAuthAddress(self.site.address) + auth_address = self.user.getAuthAddress(self.site.address, create=False) if self.site.settings["serving"]: class_pause = "" class_resume = "hidden" @@ -329,24 +411,43 @@ def sidebarRenderControls(self, body, site): class_pause = "hidden" class_resume = "" - body.append(""" + body.append(_(u"""
    • - - Update - Pause - Resume - Delete + + {_[Update]} + {_[Pause]} + {_[Resume]} + {_[Delete]}
    • - """.format(**locals())) + """)) + donate_key = site.content_manager.contents.get("content.json", {}).get("donate", True) site_address = self.site.address - body.append(""" + body.append(_(u""" +
    • +
      +
      + {site_address} + """)) + if donate_key == False or donate_key == "": + pass + elif (type(donate_key) == str or type(donate_key) == unicode) and len(donate_key) > 0: + body.append(_(u""" +
      +
    • -
      - {site_address} - Donate +
      +
      + {donate_key} + """)) + else: + body.append(_(u""" + {_[Donate]} + """)) + body.append(_(u""" +
    • - """.format(**locals())) + """)) def sidebarRenderOwnedCheckbox(self, body, site): if self.site.settings["own"]: @@ -354,66 +455,71 @@ def sidebarRenderOwnedCheckbox(self, body, site): else: checked = "" - body.append(""" -

      This is my site

      + body.append(_(u""" +

      {_[This is my site]}

      - """.format(**locals())) + """)) def sidebarRenderOwnSettings(self, body, site): - title = cgi.escape(site.content_manager.contents.get("content.json", {}).get("title", ""), True) - description = cgi.escape(site.content_manager.contents.get("content.json", {}).get("description", ""), True) - privatekey = cgi.escape(self.user.getSiteData(site.address, create=False).get("privatekey", "")) + title = site.content_manager.contents.get("content.json", {}).get("title", "") + description = site.content_manager.contents.get("content.json", {}).get("description", "") - body.append(u""" + body.append(_(u"""
    • - +
    • - +
    • -
    • - - -
    • -
    • - Save site settings + {_[Save site settings]}
    • - """.format(**locals())) + """)) def sidebarRenderContents(self, body, site): - body.append(""" + has_privatekey = bool(self.user.getSiteData(site.address, create=False).get("privatekey")) + if has_privatekey: + tag_privatekey = _(u"{_[Private key saved.]} {_[Forgot]}") + else: + tag_privatekey = _(u"{_[Add saved private key]}") + + body.append(_(u"""
    • - - """) + + """.replace("{tag_privatekey}", tag_privatekey))) # Choose content you want to sign + body.append(_(u""" + + """)) + contents = ["content.json"] contents += site.content_manager.contents.get("content.json", {}).get("includes", {}).keys() - if len(contents) > 1: - body.append("
      Choose: ") - for content in contents: - content = cgi.escape(content, True) - body.append("{content} ".format(**locals())) - body.append("
      ") - - body.append(""" - - Sign - Publish -
    • - """) + body.append(_(u"
      {_[Choose]}: ")) + for content in contents: + body.append(_("{content} ")) + body.append("
      ") + body.append("") def actionSidebarGetHtmlTag(self, to): + permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") + site = self.site body = [] body.append("
      ") + body.append("×") body.append("

      %s

      " % cgi.escape(site.content_manager.contents.get("content.json", {}).get("title", ""), True)) body.append("
      ") @@ -441,6 +547,10 @@ def actionSidebarGetHtmlTag(self, to): body.append("
    ") body.append("") + body.append("") + self.response(to, "".join(body)) def downloadGeoLiteDb(self, db_path): @@ -450,7 +560,7 @@ def downloadGeoLiteDb(self, db_path): from util import helper self.log.info("Downloading GeoLite2 City database...") - self.cmd("notification", ["geolite-info", "Downloading GeoLite2 City database (one time only, ~20MB)...", 0]) + self.cmd("progress", ["geolite-info", _["Downloading GeoLite2 City database (one time only, ~20MB)..."], 0]) db_urls = [ "https://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz", "https://raw.githubusercontent.com/texnikru/GeoLite2-Database/master/GeoLite2-City.mmdb.gz" @@ -459,13 +569,18 @@ def downloadGeoLiteDb(self, db_path): try: # Download response = helper.httpRequest(db_url) - + data_size = response.getheader('content-length') + data_recv = 0 data = StringIO.StringIO() while True: buff = response.read(1024 * 512) if not buff: break data.write(buff) + data_recv += 1024 * 512 + if data_size: + progress = int(float(data_recv) / int(data_size) * 100) + self.cmd("progress", ["geolite-info", _["Downloading GeoLite2 City database (one time only, ~20MB)..."], progress]) self.log.info("GeoLite2 City database downloaded (%s bytes), unpacking..." % data.tell()) data.seek(0) @@ -473,78 +588,120 @@ def downloadGeoLiteDb(self, db_path): with gzip.GzipFile(fileobj=data) as gzip_file: shutil.copyfileobj(gzip_file, open(db_path, "wb")) - self.cmd("notification", ["geolite-done", "GeoLite2 City database downloaded!", 5000]) + self.cmd("progress", ["geolite-info", _["GeoLite2 City database downloaded!"], 100]) time.sleep(2) # Wait for notify animation return True - except Exception, err: + except Exception as err: self.log.error("Error downloading %s: %s" % (db_url, err)) pass - self.cmd("notification", [ - "geolite-error", - "GeoLite2 City database download error: %s!
    Please download and unpack to data dir:
    %s" % (err, db_urls[0]), - 0 + self.cmd("progress", [ + "geolite-info", + _["GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}"].format(err, db_urls[0]), + -100 ]) + def getLoc(self, geodb, ip): + global loc_cache + + if ip in loc_cache: + return loc_cache[ip] + else: + try: + loc_data = geodb.get(ip) + except: + loc_data = None + + if not loc_data or "location" not in loc_data: + loc_cache[ip] = None + return None + + loc = { + "lat": loc_data["location"]["latitude"], + "lon": loc_data["location"]["longitude"], + } + if "city" in loc_data: + loc["city"] = loc_data["city"]["names"]["en"] + + if "country" in loc_data: + loc["country"] = loc_data["country"]["names"]["en"] + + loc_cache[ip] = loc + return loc + + def getPeerLocations(self, peers): + import maxminddb + db_path = config.data_dir + '/GeoLite2-City.mmdb' + if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: + if not self.downloadGeoLiteDb(db_path): + return False + geodb = maxminddb.open_database(db_path) + + peers = peers.values() + # Place bars + peer_locations = [] + placed = {} # Already placed bars here + for peer in peers: + # Height of bar + if peer.connection and peer.connection.last_ping_delay: + ping = round(peer.connection.last_ping_delay * 1000) + else: + ping = None + loc = self.getLoc(geodb, peer.ip) + + if not loc: + continue + # Create position array + lat, lon = loc["lat"], loc["lon"] + latlon = "%s,%s" % (lat, lon) + if latlon in placed and helper.getIpType(peer.ip) == "ipv4": # Dont place more than 1 bar to same place, fake repos using ip address last two part + lat += float(128 - int(peer.ip.split(".")[-2])) / 50 + lon += float(128 - int(peer.ip.split(".")[-1])) / 50 + latlon = "%s,%s" % (lat, lon) + placed[latlon] = True + peer_location = {} + peer_location.update(loc) + peer_location["lat"] = lat + peer_location["lon"] = lon + peer_location["ping"] = ping + + peer_locations.append(peer_location) + + # Append myself + for ip in self.site.connection_server.ip_external_list: + my_loc = self.getLoc(geodb, ip) + if my_loc: + my_loc["ping"] = 0 + peer_locations.append(my_loc) + + return 
peer_locations + + def actionSidebarGetPeers(self, to): permissions = self.getPermissions(to) if "ADMIN" not in permissions: return self.response(to, "You don't have permission to run this command") try: - import maxminddb - db_path = config.data_dir + '/GeoLite2-City.mmdb' - if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: - if not self.downloadGeoLiteDb(db_path): - return False - geodb = maxminddb.open_database(db_path) - - peers = self.site.peers.values() - # Find avg ping + peer_locations = self.getPeerLocations(self.site.peers) + globe_data = [] ping_times = [ - peer.connection.last_ping_delay - for peer in peers - if peer.connection and peer.connection.last_ping_delay and peer.connection.last_ping_delay + peer_location["ping"] + for peer_location in peer_locations + if peer_location["ping"] ] if ping_times: ping_avg = sum(ping_times) / float(len(ping_times)) else: ping_avg = 0 - # Place bars - globe_data = [] - placed = {} # Already placed bars here - for peer in peers: - # Height of bar - if peer.connection and peer.connection.last_ping_delay: - ping = min(0.20, math.log(1 + peer.connection.last_ping_delay / ping_avg, 300)) - else: - ping = -0.03 - # Query and cache location - if peer.ip in loc_cache: - loc = loc_cache[peer.ip] + for peer_location in peer_locations: + if peer_location["ping"] == 0: # Me + height = -0.135 + elif peer_location["ping"]: + height = min(0.20, math.log(1 + peer_location["ping"] / ping_avg, 300)) else: - try: - loc = geodb.get(peer.ip) - except: - loc = None - loc_cache[peer.ip] = loc - if not loc or "location" not in loc: - continue - - # Create position array - lat, lon = (loc["location"]["latitude"], loc["location"]["longitude"]) - latlon = "%s,%s" % (lat, lon) - if latlon in placed: # Dont place more than 1 bar to same place, fake repos using ip address last two part - lat += float(128 - int(peer.ip.split(".")[-2])) / 50 - lon += float(128 - int(peer.ip.split(".")[-1])) / 50 - latlon = "%s,%s" % (lat, lon) - 
placed[latlon] = True - - globe_data += (lat, lon, ping) - # Append myself - loc = geodb.get(config.ip_external) - if loc: - lat, lon = (loc["location"]["latitude"], loc["location"]["longitude"]) - globe_data += (lat, lon, -0.135) + height = -0.03 + + globe_data += [peer_location["lat"], peer_location["lon"], height] self.response(to, globe_data) except Exception, err: @@ -553,28 +710,35 @@ def actionSidebarGetPeers(self, to): def actionSiteSetOwned(self, to, owned): permissions = self.getPermissions(to) - - if "Multiuser" in PluginManager.plugin_manager.plugin_names: - self.cmd("notification", ["info", "This function is disabled on this proxy"]) - return False - if "ADMIN" not in permissions: return self.response(to, "You don't have permission to run this command") + + if self.site.address == config.updatesite: + return self.response(to, "You can't change the ownership of the updater site") + self.site.settings["own"] = bool(owned) + self.site.updateWebsocket(owned=owned) - def actionSiteSetAutodownloadoptional(self, to, owned): + def actionUserSetSitePrivatekey(self, to, privatekey): permissions = self.getPermissions(to) + if "ADMIN" not in permissions: + return self.response(to, "You don't have permission to run this command") - if "Multiuser" in PluginManager.plugin_manager.plugin_names: - self.cmd("notification", ["info", "This function is disabled on this proxy"]) - return False + site_data = self.user.sites[self.site.address] + site_data["privatekey"] = privatekey + self.site.updateWebsocket(set_privatekey=bool(privatekey)) + + return "ok" + def actionSiteSetAutodownloadoptional(self, to, owned): + permissions = self.getPermissions(to) if "ADMIN" not in permissions: return self.response(to, "You don't have permission to run this command") + self.site.settings["autodownloadoptional"] = bool(owned) self.site.bad_files = {} gevent.spawn(self.site.update, check_files=True) - self.site.worker_manager.removeGoodFileTasks() + 
self.site.worker_manager.removeSolvedFileTasks() def actionDbReload(self, to): permissions = self.getPermissions(to) diff --git a/plugins/Sidebar/ZipStream.py b/plugins/Sidebar/ZipStream.py new file mode 100644 index 000000000..ea6283e47 --- /dev/null +++ b/plugins/Sidebar/ZipStream.py @@ -0,0 +1,59 @@ +import cStringIO as StringIO +import os +import zipfile + +class ZipStream(file): + def __init__(self, dir_path): + self.dir_path = dir_path + self.pos = 0 + self.buff_pos = 0 + self.zf = zipfile.ZipFile(self, 'w', zipfile.ZIP_DEFLATED, allowZip64=True) + self.buff = StringIO.StringIO() + self.file_list = self.getFileList() + + def getFileList(self): + for root, dirs, files in os.walk(self.dir_path): + for file in files: + file_path = root + "/" + file + relative_path = os.path.join(os.path.relpath(root, self.dir_path), file) + yield file_path, relative_path + self.zf.close() + + def read(self, size=60 * 1024): + for file_path, relative_path in self.file_list: + self.zf.write(file_path, relative_path) + if self.buff.tell() >= size: + break + self.buff.seek(0) + back = self.buff.read() + self.buff.truncate(0) + self.buff.seek(0) + self.buff_pos += len(back) + return back + + def write(self, data): + self.pos += len(data) + self.buff.write(data) + + def tell(self): + return self.pos + + def seek(self, pos, whence=0): + if pos >= self.buff_pos: + self.buff.seek(pos - self.buff_pos, whence) + self.pos = pos + + def flush(self): + pass + + +if __name__ == "__main__": + zs = ZipStream(".") + out = open("out.zip", "wb") + while 1: + data = zs.read() + print("Write %s" % len(data)) + if not data: + break + out.write(data) + out.close() diff --git a/plugins/Sidebar/languages/da.json b/plugins/Sidebar/languages/da.json new file mode 100644 index 000000000..a421292c9 --- /dev/null +++ b/plugins/Sidebar/languages/da.json @@ -0,0 +1,81 @@ +{ + "Peers": "Klienter", + "Connected": "Forbundet", + "Connectable": "Mulige", + "Connectable peers": "Mulige klienter", + + "Data 
transfer": "Data overførsel", + "Received": "Modtaget", + "Received bytes": "Bytes modtaget", + "Sent": "Sendt", + "Sent bytes": "Bytes sendt", + + "Files": "Filer", + "Total": "I alt", + "Image": "Image", + "Other": "Andet", + "User data": "Bruger data", + + "Size limit": "Side max størrelse", + "limit used": "brugt", + "free space": "fri", + "Set": "Opdater", + + "Optional files": "Valgfri filer", + "Downloaded": "Downloadet", + "Download and help distribute all files": "Download og hjælp med at dele filer", + "Total size": "Størrelse i alt", + "Downloaded files": "Filer downloadet", + + "Database": "Database", + "search feeds": "søgninger", + "{feeds} query": "{feeds} søgninger", + "Reload": "Genindlæs", + "Rebuild": "Genopbyg", + "No database found": "Ingen database fundet", + + "Identity address": "Autorisations ID", + "Change": "Skift", + + "Update": "Opdater", + "Pause": "Pause", + "Resume": "Aktiv", + "Delete": "Slet", + "Are you sure?": "Er du sikker?", + + "Site address": "Side addresse", + "Donate": "Doner penge", + + "Missing files": "Manglende filer", + "{} try": "{} forsøg", + "{} tries": "{} forsøg", + "+ {num_bad_files} more": "+ {num_bad_files} mere", + + "This is my site": "Dette er min side", + "Site title": "Side navn", + "Site description": "Side beskrivelse", + "Save site settings": "Gem side opsætning", + + "Content publishing": "Indhold offentliggøres", + "Choose": "Vælg", + "Sign": "Signer", + "Publish": "Offentliggør", + + "This function is disabled on this proxy": "Denne funktion er slået fra på denne ZeroNet proxyEz a funkció ki van kapcsolva ezen a proxy-n", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 City database kunne ikke downloades: {}!
    Download venligst databasen manuelt og udpak i data folder:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 város adatbázis letöltése (csak egyszer kell, kb 20MB)...", + "GeoLite2 City database downloaded!": "GeoLite2 City database downloadet!", + + "Are you sure?": "Er du sikker?", + "Site storage limit modified!": "Side max størrelse ændret!", + "Database schema reloaded!": "Database definition genindlæst!", + "Database rebuilding....": "Genopbygger database...", + "Database rebuilt!": "Database genopbygget!", + "Site updated!": "Side opdateret!", + "Delete this site": "Slet denne side", + "File write error: ": "Fejl ved skrivning af fil: ", + "Site settings saved!": "Side opsætning gemt!", + "Enter your private key:": "Indtast din private nøgle:", + " Signed!": " Signeret!", + "WebGL not supported": "WebGL er ikke supporteret" +} \ No newline at end of file diff --git a/plugins/Sidebar/languages/de.json b/plugins/Sidebar/languages/de.json new file mode 100644 index 000000000..2f5feacd8 --- /dev/null +++ b/plugins/Sidebar/languages/de.json @@ -0,0 +1,81 @@ +{ + "Peers": "Peers", + "Connected": "Verbunden", + "Connectable": "Verbindbar", + "Connectable peers": "Verbindbare Peers", + + "Data transfer": "Datei Transfer", + "Received": "Empfangen", + "Received bytes": "Empfangene Bytes", + "Sent": "Gesendet", + "Sent bytes": "Gesendete Bytes", + + "Files": "Dateien", + "Total": "Gesamt", + "Image": "Bilder", + "Other": "Sonstiges", + "User data": "Nutzer Daten", + + "Size limit": "Speicher Limit", + "limit used": "Limit benutzt", + "free space": "freier Speicher", + "Set": "Setzten", + + "Optional files": "Optionale Dateien", + "Downloaded": "Heruntergeladen", + "Download and help distribute all files": "Herunterladen und helfen alle Dateien zu verteilen", + "Total size": "Gesamte Größe", + "Downloaded files": "Heruntergeladene Dateien", + + "Database": "Datenbank", + "search feeds": "Feeds durchsuchen", + "{feeds} query": "{feeds} Abfrage", + "Reload": "Neu laden", + "Rebuild": "Neu 
bauen", + "No database found": "Keine Datenbank gefunden", + + "Identity address": "Identitäts Adresse", + "Change": "Ändern", + + "Update": "Aktualisieren", + "Pause": "Pausieren", + "Resume": "Fortsetzen", + "Delete": "Löschen", + "Are you sure?": "Bist du sicher?", + + "Site address": "Seiten Adresse", + "Donate": "Spenden", + + "Missing files": "Fehlende Dateien", + "{} try": "{} versuch", + "{} tries": "{} versuche", + "+ {num_bad_files} more": "+ {num_bad_files} mehr", + + "This is my site": "Das ist meine Seite", + "Site title": "Seiten Titel", + "Site description": "Seiten Beschreibung", + "Save site settings": "Einstellungen der Seite speichern", + + "Content publishing": "Inhaltsveröffentlichung", + "Choose": "Wähle", + "Sign": "Signieren", + "Publish": "Veröffentlichen", + + "This function is disabled on this proxy": "Diese Funktion ist auf dieser Proxy deaktiviert", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 City Datenbank Download Fehler: {}!
    Bitte manuell herunterladen und die Datei in das Datenverzeichnis extrahieren:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Herunterladen der GeoLite2 City Datenbank (einmalig, ~20MB)...", + "GeoLite2 City database downloaded!": "GeoLite2 City Datenbank heruntergeladen!", + + "Are you sure?": "Bist du sicher?", + "Site storage limit modified!": "Speicher Limit der Seite modifiziert!", + "Database schema reloaded!": "Datebank Schema neu geladen!", + "Database rebuilding....": "Datenbank neu bauen...", + "Database rebuilt!": "Datenbank neu gebaut!", + "Site updated!": "Seite aktualisiert!", + "Delete this site": "Diese Seite löschen", + "File write error: ": "Datei schreib fehler:", + "Site settings saved!": "Seiten Einstellungen gespeichert!", + "Enter your private key:": "Gib deinen privaten Schlüssel ein:", + " Signed!": " Signiert!", + "WebGL not supported": "WebGL nicht unterstützt" +} diff --git a/plugins/Sidebar/languages/es.json b/plugins/Sidebar/languages/es.json new file mode 100644 index 000000000..b9e98c465 --- /dev/null +++ b/plugins/Sidebar/languages/es.json @@ -0,0 +1,79 @@ +{ + "Peers": "Pares", + "Connected": "Conectados", + "Connectable": "Conectables", + "Connectable peers": "Pares conectables", + + "Data transfer": "Transferencia de datos", + "Received": "Recibidos", + "Received bytes": "Bytes recibidos", + "Sent": "Enviados", + "Sent bytes": "Bytes envidados", + + "Files": "Ficheros", + "Total": "Total", + "Image": "Imagen", + "Other": "Otro", + "User data": "Datos del usuario", + + "Size limit": "Límite de tamaño", + "limit used": "Límite utilizado", + "free space": "Espacio libre", + "Set": "Establecer", + + "Optional files": "Ficheros opcionales", + "Downloaded": "Descargado", + "Download and help distribute all files": "Descargar y ayudar a distribuir todos los ficheros", + "Total size": "Tamaño total", + "Downloaded files": "Ficheros descargados", + + "Database": "Base de datos", + "search feeds": "Fuentes de búsqueda", + "{feeds} query": "{feeds} consulta", + "Reload": "Recargar", + 
"Rebuild": "Reconstruir", + "No database found": "No se ha encontrado la base de datos", + + "Identity address": "Dirección de la identidad", + "Change": "Cambiar", + + "Update": "Actualizar", + "Pause": "Pausar", + "Resume": "Reanudar", + "Delete": "Borrar", + + "Site address": "Dirección del sitio", + "Donate": "Donar", + + "Missing files": "Ficheros perdidos", + "{} try": "{} intento", + "{} tries": "{} intentos", + "+ {num_bad_files} more": "+ {num_bad_files} más", + + "This is my site": "Este es mi sitio", + "Site title": "Título del sitio", + "Site description": "Descripción del sitio", + "Save site settings": "Guardar la configuración del sitio", + + "Content publishing": "Publicación del contenido", + "Choose": "Elegir", + "Sign": "Firmar", + "Publish": "Publicar", + "This function is disabled on this proxy": "Esta función está deshabilitada en este proxy", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "¡Error de descarga de la base de datos GeoLite2: {}!
    Por favor, descárgala manualmente y descomprímela en el directorio de datos:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Descargando la base de datos de GeoLite2 (una única vez, ~20MB)...", + "GeoLite2 City database downloaded!": "¡Base de datos de GeoLite2 descargada!", + + "Are you sure?": "¿Estás seguro?", + "Site storage limit modified!": "¡Límite de almacenamiento del sitio modificado!", + "Database schema reloaded!": "¡Esquema de la base de datos recargado!", + "Database rebuilding....": "Reconstruyendo la base de datos...", + "Database rebuilt!": "¡Base de datos reconstruida!", + "Site updated!": "¡Sitio actualizado!", + "Delete this site": "Borrar este sitio", + "File write error: ": "Error de escritura de fichero:", + "Site settings saved!": "¡Configuración del sitio guardada!", + "Enter your private key:": "Introduce tu clave privada:", + " Signed!": " ¡firmado!", + "WebGL not supported": "WebGL no está soportado" +} diff --git a/plugins/Sidebar/languages/fr.json b/plugins/Sidebar/languages/fr.json new file mode 100644 index 000000000..5c4b3ac70 --- /dev/null +++ b/plugins/Sidebar/languages/fr.json @@ -0,0 +1,82 @@ +{ + "Peers": "Pairs", + "Connected": "Connectés", + "Connectable": "Accessibles", + "Connectable peers": "Pairs accessibles", + + "Data transfer": "Données transférées", + "Received": "Reçues", + "Received bytes": "Bytes reçus", + "Sent": "Envoyées", + "Sent bytes": "Bytes envoyés", + + "Files": "Fichiers", + "Total": "Total", + "Image": "Image", + "Other": "Autre", + "User data": "Utilisateurs", + + "Size limit": "Taille maximale", + "limit used": "utlisé", + "free space": "libre", + "Set": "Modifier", + + "Optional files": "Fichiers optionnels", + "Downloaded": "Téléchargé", + "Download and help distribute all files": "Télécharger et distribuer tous les fichiers", + "Total size": "Taille totale", + "Downloaded files": "Fichiers téléchargés", + + "Database": "Base de données", + "search feeds": "recherche", + "{feeds} query": "{feeds} requête", + "Reload": "Recharger", + "Rebuild": 
"Reconstruire", + "No database found": "Aucune base de données trouvée", + + "Identity address": "Adresse d'identité", + "Change": "Modifier", + + "Site control": "Opérations", + "Update": "Mettre à jour", + "Pause": "Suspendre", + "Resume": "Reprendre", + "Delete": "Supprimer", + "Are you sure?": "Êtes-vous certain?", + + "Site address": "Adresse du site", + "Donate": "Faire un don", + + "Missing files": "Fichiers manquants", + "{} try": "{} essai", + "{} tries": "{} essais", + "+ {num_bad_files} more": "+ {num_bad_files} manquants", + + "This is my site": "Ce site m'appartient", + "Site title": "Nom du site", + "Site description": "Description du site", + "Save site settings": "Enregistrer les paramètres", + + "Content publishing": "Publication du contenu", + "Choose": "Sélectionner", + "Sign": "Signer", + "Publish": "Publier", + + "This function is disabled on this proxy": "Cette fonction est désactivé sur ce proxy", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "Erreur au téléchargement de la base de données GeoLite2: {}!
    Téléchargez et décompressez dans le dossier data:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Téléchargement de la base de données GeoLite2 (une seule fois, ~20MB)...", + "GeoLite2 City database downloaded!": "Base de données GeoLite2 téléchargée!", + + "Are you sure?": "Êtes-vous certain?", + "Site storage limit modified!": "Taille maximale modifiée!", + "Database schema reloaded!": "Base de données rechargée!", + "Database rebuilding....": "Reconstruction de la base de données...", + "Database rebuilt!": "Base de données reconstruite!", + "Site updated!": "Site mis à jour!", + "Delete this site": "Supprimer ce site", + "File write error: ": "Erreur à l'écriture du fichier: ", + "Site settings saved!": "Paramètres du site enregistrés!", + "Enter your private key:": "Entrez votre clé privée:", + " Signed!": " Signé!", + "WebGL not supported": "WebGL n'est pas supporté" +} diff --git a/plugins/Sidebar/languages/hu.json b/plugins/Sidebar/languages/hu.json new file mode 100644 index 000000000..40ed8fab3 --- /dev/null +++ b/plugins/Sidebar/languages/hu.json @@ -0,0 +1,82 @@ +{ + "Peers": "Csatlakozási pontok", + "Connected": "Csaltakozva", + "Connectable": "Csatlakozható", + "Connectable peers": "Csatlakozható peer-ek", + + "Data transfer": "Adatátvitel", + "Received": "Fogadott", + "Received bytes": "Fogadott byte-ok", + "Sent": "Küldött", + "Sent bytes": "Küldött byte-ok", + + "Files": "Fájlok", + "Total": "Összesen", + "Image": "Kép", + "Other": "Egyéb", + "User data": "Felh. 
adat", + + "Size limit": "Méret korlát", + "limit used": "felhasznált", + "free space": "szabad hely", + "Set": "Beállít", + + "Optional files": "Opcionális fájlok", + "Downloaded": "Letöltött", + "Download and help distribute all files": "Minden opcionális fájl letöltése", + "Total size": "Teljes méret", + "Downloaded files": "Letöltve", + + "Database": "Adatbázis", + "search feeds": "Keresés források", + "{feeds} query": "{feeds} lekérdezés", + "Reload": "Újratöltés", + "Rebuild": "Újraépítés", + "No database found": "Adatbázis nem található", + + "Identity address": "Azonosító cím", + "Change": "Módosít", + + "Site control": "Oldal műveletek", + "Update": "Frissít", + "Pause": "Szünteltet", + "Resume": "Folytat", + "Delete": "Töröl", + "Are you sure?": "Biztos vagy benne?", + + "Site address": "Oldal címe", + "Donate": "Támogatás", + + "Missing files": "Hiányzó fájlok", + "{} try": "{} próbálkozás", + "{} tries": "{} próbálkozás", + "+ {num_bad_files} more": "+ még {num_bad_files} darab", + + "This is my site": "Ez az én oldalam", + "Site title": "Oldal neve", + "Site description": "Oldal leírása", + "Save site settings": "Oldal beállítások mentése", + + "Content publishing": "Tartalom publikálás", + "Choose": "Válassz", + "Sign": "Aláírás", + "Publish": "Publikálás", + + "This function is disabled on this proxy": "Ez a funkció ki van kapcsolva ezen a proxy-n", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 város adatbázis letöltési hiba: {}!
    A térképhez töltsd le és csomagold ki a data könyvtárba:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 város adatbázis letöltése (csak egyszer kell, kb 20MB)...", + "GeoLite2 City database downloaded!": "GeoLite2 város adatbázis letöltve!", + + "Are you sure?": "Biztos vagy benne?", + "Site storage limit modified!": "Az oldalt méret korlát módosítva!", + "Database schema reloaded!": "Adatbázis séma újratöltve!", + "Database rebuilding....": "Adatbázis újraépítés...", + "Database rebuilt!": "Adatbázis újraépítve!", + "Site updated!": "Az oldal frissítve!", + "Delete this site": "Az oldal törlése", + "File write error: ": "Fájl írási hiba: ", + "Site settings saved!": "Az oldal beállításai elmentve!", + "Enter your private key:": "Add meg a prviát kulcsod:", + " Signed!": " Aláírva!", + "WebGL not supported": "WebGL nem támogatott" +} \ No newline at end of file diff --git a/plugins/Sidebar/languages/it.json b/plugins/Sidebar/languages/it.json new file mode 100644 index 000000000..6aa0969a7 --- /dev/null +++ b/plugins/Sidebar/languages/it.json @@ -0,0 +1,81 @@ +{ + "Peers": "Peer", + "Connected": "Connessi", + "Connectable": "Collegabili", + "Connectable peers": "Peer collegabili", + + "Data transfer": "Trasferimento dati", + "Received": "Ricevuti", + "Received bytes": "Byte ricevuti", + "Sent": "Inviati", + "Sent bytes": "Byte inviati", + + "Files": "File", + "Total": "Totale", + "Image": "Imagine", + "Other": "Altro", + "User data": "Dati utente", + + "Size limit": "Limite dimensione", + "limit used": "limite usato", + "free space": "spazio libero", + "Set": "Imposta", + + "Optional files": "File facoltativi", + "Downloaded": "Scaricati", + "Download and help distribute all files": "Scarica e aiuta a distribuire tutti i file", + "Total size": "Dimensione totale", + "Downloaded files": "File scaricati", + + "Database": "Database", + "search feeds": "ricerca di feed", + "{feeds} query": "{feeds} interrogazione", + "Reload": "Ricaricare", + "Rebuild": "Ricostruire", + "No database 
found": "Nessun database trovato", + + "Identity address": "Indirizzo di identità", + "Change": "Cambia", + + "Update": "Aggiorna", + "Pause": "Sospendi", + "Resume": "Riprendi", + "Delete": "Cancella", + "Are you sure?": "Sei sicuro?", + + "Site address": "Indirizzo sito", + "Donate": "Dona", + + "Missing files": "File mancanti", + "{} try": "{} tenta", + "{} tries": "{} prova", + "+ {num_bad_files} more": "+ {num_bad_files} altri", + + "This is my site": "Questo è il mio sito", + "Site title": "Titolo sito", + "Site description": "Descrizione sito", + "Save site settings": "Salva impostazioni sito", + + "Content publishing": "Pubblicazione contenuto", + "Choose": "Scegli", + "Sign": "Firma", + "Publish": "Pubblica", + + "This function is disabled on this proxy": "Questa funzione è disabilitata su questo proxy", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "Errore scaricamento database GeoLite2 City: {}!
    Si prega di scaricarlo manualmente e spacchettarlo nella cartella dati:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Scaricamento database GeoLite2 City (solo una volta, ~20MB)...", + "GeoLite2 City database downloaded!": "Database GeoLite2 City scaricato!", + + "Are you sure?": "Sei sicuro?", + "Site storage limit modified!": "Limite di archiviazione del sito modificato!", + "Database schema reloaded!": "Schema database ricaricato!", + "Database rebuilding....": "Ricostruzione database...", + "Database rebuilt!": "Database ricostruito!", + "Site updated!": "Sito aggiornato!", + "Delete this site": "Cancella questo sito", + "File write error: ": "Errore scrittura file:", + "Site settings saved!": "Impostazioni sito salvate!", + "Enter your private key:": "Inserisci la tua chiave privata:", + " Signed!": " Firmato!", + "WebGL not supported": "WebGL non supportato" +} diff --git a/plugins/Sidebar/languages/jp.json b/plugins/Sidebar/languages/jp.json new file mode 100644 index 000000000..99b345648 --- /dev/null +++ b/plugins/Sidebar/languages/jp.json @@ -0,0 +1,82 @@ +{ + "Peers": "ピア", + "Connected": "接続済み", + "Connectable": "利用可能", + "Connectable peers": "ピアに接続可能", + + "Data transfer": "データ転送", + "Received": "受信", + "Received bytes": "受信バイト数", + "Sent": "送信", + "Sent bytes": "送信バイト数", + + "Files": "ファイル", + "Total": "合計", + "Image": "画像", + "Other": "その他", + "User data": "ユーザーデータ", + + "Size limit": "サイズ制限", + "limit used": "使用上限", + "free space": "フリースペース", + "Set": "セット", + + "Optional files": "オプション ファイル", + "Downloaded": "ダウンロード済み", + "Download and help distribute all files": "ダウンロードしてすべてのファイルの配布を支援する", + "Total size": "合計サイズ", + "Downloaded files": "ダウンロードされたファイル", + + "Database": "データベース", + "search feeds": "フィードを検索する", + "{feeds} query": "{フィード} お問い合わせ", + "Reload": "再読込", + "Rebuild": "再ビルド", + "No database found": "データベースが見つかりません", + + "Identity address": "Identity address", + "Change": "編集", + + "Site control": "サイト管理", + "Update": "更新", + "Pause": "一時停止", + "Resume": "再開", + "Delete": "削除", + 
"Are you sure?": "本当によろしいですか?", + + "Site address": "サイトアドレス", + "Donate": "寄付する", + + "Missing files": "ファイルがありません", + "{} try": "{} 試す", + "{} tries": "{} 試行", + "+ {num_bad_files} more": "+ {num_bad_files} more", + + "This is my site": "This is my site", + "Site title": "サイトタイトル", + "Site description": "サイトの説明", + "Save site settings": "サイトの設定を保存する", + + "Content publishing": "コンテンツを公開する", + "Choose": "選択", + "Sign": "Sign", + "Publish": "公開する", + + "This function is disabled on this proxy": "この機能はこのプロキシで無効になっています", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 Cityデータベースのダウンロードエラー: {}!
    手動でダウンロードして、データフォルダに解凍してください:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 Cityデータベースの読み込み (これは一度だけ行われます, ~20MB)...", + "GeoLite2 City database downloaded!": "GeoLite2 Cityデータベースがダウンロードされました!", + + "Are you sure?": "本当によろしいですか?", + "Site storage limit modified!": "サイトの保存容量の制限が変更されました!", + "Database schema reloaded!": "データベーススキーマがリロードされました!", + "Database rebuilding....": "データベースの再構築中....", + "Database rebuilt!": "データベースが再構築されました!", + "Site updated!": "サイトが更新されました!", + "Delete this site": "このサイトを削除する", + "File write error: ": "ファイル書き込みエラー:", + "Site settings saved!": "サイト設定が保存されました!", + "Enter your private key:": "秘密鍵を入力してください:", + " Signed!": " Signed!", + "WebGL not supported": "WebGLはサポートされていません" +} diff --git a/plugins/Sidebar/languages/pl.json b/plugins/Sidebar/languages/pl.json new file mode 100644 index 000000000..932685078 --- /dev/null +++ b/plugins/Sidebar/languages/pl.json @@ -0,0 +1,82 @@ +{ + "Peers": "Użytkownicy równorzędni", + "Connected": "Połączony", + "Connectable": "Możliwy do podłączenia", + "Connectable peers": "Połączeni użytkownicy równorzędni", + + "Data transfer": "Transfer danych", + "Received": "Odebrane", + "Received bytes": "Odebrany bajty", + "Sent": "Wysłane", + "Sent bytes": "Wysłane bajty", + + "Files": "Pliki", + "Total": "Sumarycznie", + "Image": "Obraz", + "Other": "Inne", + "User data": "Dane użytkownika", + + "Size limit": "Rozmiar limitu", + "limit used": "zużyty limit", + "free space": "wolna przestrzeń", + "Set": "Ustaw", + + "Optional files": "Pliki opcjonalne", + "Downloaded": "Ściągnięte", + "Download and help distribute all files": "Ściągnij i pomóż rozpowszechniać wszystkie pliki", + "Total size": "Rozmiar sumaryczny", + "Downloaded files": "Ściągnięte pliki", + + "Database": "Baza danych", + "search feeds": "przeszukaj zasoby", + "{feeds} query": "{feeds} pytanie", + "Reload": "Odśwież", + "Rebuild": "Odbuduj", + "No database found": "Nie odnaleziono bazy danych", + + "Identity address": "Adres 
identyfikacyjny", + "Change": "Zmień", + + "Site control": "Kontrola strony", + "Update": "Zaktualizuj", + "Pause": "Wstrzymaj", + "Resume": "Wznów", + "Delete": "Skasuj", + "Are you sure?": "Jesteś pewien?", + + "Site address": "Adres strony", + "Donate": "Wspomóż", + + "Missing files": "Brakujące pliki", + "{} try": "{} próba", + "{} tries": "{} próby", + "+ {num_bad_files} more": "+ {num_bad_files} więcej", + + "This is my site": "To moja strona", + "Site title": "Tytuł strony", + "Site description": "Opis strony", + "Save site settings": "Zapisz ustawienia strony", + + "Content publishing": "Publikowanie treści", + "Choose": "Wybierz", + "Sign": "Podpisz", + "Publish": "Opublikuj", + + "This function is disabled on this proxy": "Ta funkcja jest zablokowana w tym proxy", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "Błąd ściągania bazy danych GeoLite2 City: {}!
    Proszę ściągnąć ją ręcznie i wypakować do katalogu danych:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Ściąganie bazy danych GeoLite2 City (tylko jednorazowo, ok. 20MB)...", + "GeoLite2 City database downloaded!": "Baza danych GeoLite2 City ściagnięta!", + + "Are you sure?": "Jesteś pewien?", + "Site storage limit modified!": "Limit pamięci strony zmodyfikowany!", + "Database schema reloaded!": "Schemat bazy danych załadowany ponownie!", + "Database rebuilding....": "Przebudowywanie bazy danych...", + "Database rebuilt!": "Baza danych przebudowana!", + "Site updated!": "Strona zaktualizowana!", + "Delete this site": "Usuń tę stronę", + "File write error: ": "Błąd zapisu pliku: ", + "Site settings saved!": "Ustawienia strony zapisane!", + "Enter your private key:": "Wpisz swój prywatny klucz:", + " Signed!": " Podpisane!", + "WebGL not supported": "WebGL nie jest obsługiwany" +} diff --git a/plugins/Sidebar/languages/pt-br.json b/plugins/Sidebar/languages/pt-br.json new file mode 100644 index 000000000..44fe06a2d --- /dev/null +++ b/plugins/Sidebar/languages/pt-br.json @@ -0,0 +1,97 @@ +{ + "Copy to clipboard": "Copiar para área de transferência (clipboard)", + "Peers": "Peers", + "Connected": "Ligados", + "Connectable": "Disponíveis", + "Onion": "Onion", + "Local": "Locais", + "Connectable peers": "Peers disponíveis", + + "Data transfer": "Transferência de dados", + "Received": "Recebidos", + "Received bytes": "Bytes recebidos", + "Sent": "Enviados", + "Sent bytes": "Bytes enviados", + + "Files": "Arquivos", + "Save as .zip": "Salvar como .zip", + "Total": "Total", + "Image": "Imagem", + "Other": "Outros", + "User data": "Dados do usuário", + + "Size limit": "Limite de tamanho", + "limit used": "limite utilizado", + "free space": "espaço livre", + "Set": "Definir", + + "Optional files": "Arquivos opcionais", + "Downloaded": "Baixados", + "Download and help distribute all files": "Baixar e ajudar a distribuir todos os arquivos", + "Total size": "Tamanho total", + "Downloaded files": "Arquivos 
baixados", + + "Database": "Banco de dados", + "search feeds": "pesquisar feeds", + "{feeds} query": "consulta de {feeds}", + "Reload": "Recarregar", + "Rebuild": "Reconstruir", + "No database found": "Base de dados não encontrada", + + "Identity address": "Endereço de identidade", + "Change": "Alterar", + + "Site control": "Controle do site", + "Update": "Atualizar", + "Pause": "Suspender", + "Resume": "Continuar", + "Delete": "Remover", + "Are you sure?": "Tem certeza?", + + "Site address": "Endereço do site", + "Donate": "Doar", + + "Needs to be updated": "Necessitam ser atualizados", + "{} try": "{} tentativa", + "{} tries": "{} tentativas", + "+ {num_bad_files} more": "+ {num_bad_files} adicionais", + + "This is my site": "Este é o meu site", + "Site title": "Título do site", + "Site description": "Descrição do site", + "Save site settings": "Salvar definições do site", + "Open site directory": "Abrir diretório do site", + + "Content publishing": "Publicação do conteúdo", + "Choose": "Escolher", + "Sign": "Assinar", + "Publish": "Publicar", + "Sign and publish": "Assinar e publicar", + "add saved private key": "adicionar privatekey (chave privada) para salvar", + "Private key saved for site signing": "Privatekey foi salva para assinar o site", + "Private key saved.": "Privatekey salva.", + "forgot": "esquecer", + "Saved private key removed": "Privatekey salva foi removida", + "This function is disabled on this proxy": "Esta função encontra-se desativada neste proxy", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "Erro ao baixar a base de dados GeoLite2 City: {}!
    Por favor baixe manualmente e descompacte os dados para a seguinte pasta:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Baixando a base de dados GeoLite2 City (uma única vez, ~20MB)...", + "GeoLite2 City database downloaded!": "A base de dados GeoLite2 City foi baixada!", + + "Are you sure?": "Tem certeza?", + "Site storage limit modified!": "O limite de armazenamento do site foi modificado!", + "Database schema reloaded!": "O esquema da base de dados foi atualizado!", + "Database rebuilding....": "Reconstruindo base de dados...", + "Database rebuilt!": "Base de dados reconstruída!", + "Site updated!": "Site atualizado!", + "Delete this site": "Remover este site", + "Blacklist": "Blacklist", + "Blacklist this site": "Blacklistar este site", + "Reason": "Motivo", + "Delete and Blacklist": "Deletar e blacklistar", + "File write error: ": "Erro de escrita de arquivo: ", + "Site settings saved!": "Definições do site salvas!", + "Enter your private key:": "Digite sua chave privada:", + " Signed!": " Assinado!", + "WebGL not supported": "WebGL não é suportado" +} diff --git a/plugins/Sidebar/languages/ru.json b/plugins/Sidebar/languages/ru.json new file mode 100644 index 000000000..f2eeca04a --- /dev/null +++ b/plugins/Sidebar/languages/ru.json @@ -0,0 +1,82 @@ +{ + "Peers": "Пиры", + "Connected": "Подключенные", + "Connectable": "Доступные", + "Connectable peers": "Пиры доступны для подключения", + + "Data transfer": "Передача данных", + "Received": "Получено", + "Received bytes": "Получено байн", + "Sent": "Отправлено", + "Sent bytes": "Отправлено байт", + + "Files": "Файлы", + "Total": "Всего", + "Image": "Изображений", + "Other": "Другое", + "User data": "Ваш контент", + + "Size limit": "Ограничение по размеру", + "limit used": "Использовано", + "free space": "Доступно", + "Set": "Установить", + + "Optional files": "Опциональные файлы", + "Downloaded": "Загружено", + "Download and help distribute all files": "Загрузить опциональные файлы для помощи сайту", + "Total size": "Объём", + "Downloaded files": 
"Загруженные файлы", + + "Database": "База данных", + "search feeds": "поиск подписок", + "{feeds} query": "{feeds} запрос", + "Reload": "Перезагрузить", + "Rebuild": "Перестроить", + "No database found": "База данных не найдена", + + "Identity address": "Уникальный адрес", + "Change": "Изменить", + + "Site control": "Управление сайтом", + "Update": "Обновить", + "Pause": "Пауза", + "Resume": "Продолжить", + "Delete": "Удалить", + "Are you sure?": "Вы уверены?", + + "Site address": "Адрес сайта", + "Donate": "Пожертвовать", + + "Missing files": "Отсутствующие файлы", + "{} try": "{} попробовать", + "{} tries": "{} попыток", + "+ {num_bad_files} more": "+ {num_bad_files} ещё", + + "This is my site": "Это мой сайт", + "Site title": "Название сайта", + "Site description": "Описание сайта", + "Save site settings": "Сохранить настройки сайта", + + "Content publishing": "Публикация контента", + "Choose": "Выбрать", + "Sign": "Подписать", + "Publish": "Опубликовать", + + "This function is disabled on this proxy": "Эта функция отключена на этом прокси", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "Ошибка загрузки базы городов GeoLite2: {}!
    Пожалуйста, загрузите её вручную и распакуйте в папку:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "Загрузка базы городов GeoLite2 (это делается только 1 раз, ~20MB)...", + "GeoLite2 City database downloaded!": "База GeoLite2 успешно загружена!", + + "Are you sure?": "Вы уверены?", + "Site storage limit modified!": "Лимит хранилища для сайта изменен!", + "Database schema reloaded!": "Схема базы данных перезагружена!", + "Database rebuilding....": "Перестройка базы данных...", + "Database rebuilt!": "База данных перестроена!", + "Site updated!": "Сайт обновлён!", + "Delete this site": "Удалить этот сайт", + "File write error: ": "Ошибка записи файла:", + "Site settings saved!": "Настройки сайта сохранены!", + "Enter your private key:": "Введите свой приватный ключ:", + " Signed!": " Подписано!", + "WebGL not supported": "WebGL не поддерживается" +} diff --git a/plugins/Sidebar/languages/tr.json b/plugins/Sidebar/languages/tr.json new file mode 100644 index 000000000..88fcd6e01 --- /dev/null +++ b/plugins/Sidebar/languages/tr.json @@ -0,0 +1,82 @@ +{ + "Peers": "Eşler", + "Connected": "Bağlı", + "Connectable": "Erişilebilir", + "Connectable peers": "Bağlanılabilir eşler", + + "Data transfer": "Veri aktarımı", + "Received": "Alınan", + "Received bytes": "Bayt alındı", + "Sent": "Gönderilen", + "Sent bytes": "Bayt gönderildi", + + "Files": "Dosyalar", + "Total": "Toplam", + "Image": "Resim", + "Other": "Diğer", + "User data": "Kullanıcı verisi", + + "Size limit": "Boyut sınırı", + "limit used": "kullanılan", + "free space": "boş", + "Set": "Ayarla", + + "Optional files": "İsteğe bağlı dosyalar", + "Downloaded": "İndirilen", + "Download and help distribute all files": "Tüm dosyaları indir ve yayılmalarına yardım et", + "Total size": "Toplam boyut", + "Downloaded files": "İndirilen dosyalar", + + "Database": "Veritabanı", + "search feeds": "kaynak ara", + "{feeds} query": "{feeds} sorgu", + "Reload": "Yenile", + "Rebuild": "Yapılandır", + "No database found": "Veritabanı yok", + + "Identity 
address": "Kimlik adresi", + "Change": "Değiştir", + + "Site control": "Site kontrolü", + "Update": "Güncelle", + "Pause": "Duraklat", + "Resume": "Sürdür", + "Delete": "Sil", + "Are you sure?": "Emin misin?", + + "Site address": "Site adresi", + "Donate": "Bağış yap", + + "Missing files": "Eksik dosyalar", + "{} try": "{} deneme", + "{} tries": "{} deneme", + "+ {num_bad_files} more": "+ {num_bad_files} tane daha", + + "This is my site": "Bu benim sitem", + "Site title": "Site başlığı", + "Site description": "Site açıklaması", + "Save site settings": "Site ayarlarını kaydet", + + "Content publishing": "İçerik yayımlanıyor", + "Choose": "Seç", + "Sign": "İmzala", + "Publish": "Yayımla", + + "This function is disabled on this proxy": "Bu özellik bu vekilde kullanılamaz", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 Şehir veritabanı indirme hatası: {}!
    Lütfen kendiniz indirip aşağıdaki konuma açınız:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 Şehir veritabanı indiriliyor (sadece bir kere, ~20MB)...", + "GeoLite2 City database downloaded!": "GeoLite2 Şehir veritabanı indirildi!", + + "Are you sure?": "Emin misiniz?", + "Site storage limit modified!": "Site saklama sınırı değiştirildi!", + "Database schema reloaded!": "Veritabanı şeması yeniden yüklendi!", + "Database rebuilding....": "Veritabanı yeniden inşa ediliyor...", + "Database rebuilt!": "Veritabanı yeniden inşa edildi!", + "Site updated!": "Site güncellendi!", + "Delete this site": "Bu siteyi sil", + "File write error: ": "Dosya yazma hatası: ", + "Site settings saved!": "Site ayarları kaydedildi!", + "Enter your private key:": "Özel anahtarınızı giriniz:", + " Signed!": " İmzala!", + "WebGL not supported": "WebGL desteklenmiyor" +} diff --git a/plugins/Sidebar/languages/zh-tw.json b/plugins/Sidebar/languages/zh-tw.json new file mode 100644 index 000000000..9d4ea1bed --- /dev/null +++ b/plugins/Sidebar/languages/zh-tw.json @@ -0,0 +1,83 @@ +{ + "Peers": "節點數", + "Connected": "已連線", + "Connectable": "可連線", + "Connectable peers": "可連線節點", + + "Data transfer": "數據傳輸", + "Received": "已接收", + "Received bytes": "已接收位元組", + "Sent": "已傳送", + "Sent bytes": "已傳送位元組", + + "Files": "檔案", + "Total": "共計", + "Image": "圖片", + "Other": "其他", + "User data": "使用者數據", + + "Size limit": "大小限制", + "limit used": "已使用", + "free space": "可用空間", + "Set": "偏好設定", + + "Optional files": "可選檔案", + "Downloaded": "已下載", + "Download and help distribute all files": "下載並幫助分發所有檔案", + "Total size": "總大小", + "Downloaded files": "下載的檔案", + + "Database": "資料庫", + "search feeds": "搜尋供稿", + "{feeds} query": "{feeds} 查詢 ", + "Reload": "重新整理", + "Rebuild": "重建", + "No database found": "未找到資料庫", + + "Identity address": "身分位址", + "Change": "變更", + + "Site control": "網站控制", + "Update": "更新", + "Pause": "暫停", + "Resume": "恢復", + "Delete": "刪除", + "Are you sure?": "你確定?", + + "Site address": "網站位址", + 
"Donate": "捐贈", + + "Missing files": "缺少的檔案", + "{} try": "{} 嘗試", + "{} tries": "{} 已嘗試", + "+ {num_bad_files} more": "+ {num_bad_files} 更多", + + "This is my site": "這是我的網站", + "Site title": "網站標題", + "Site description": "網站描述", + "Save site settings": "存儲網站設定", + "Open site directory": "打開所在資料夾", + + "Content publishing": "內容發布", + "Choose": "選擇", + "Sign": "簽署", + "Publish": "發布", + "Sign and publish": "簽名並發布", + "This function is disabled on this proxy": "此代理上禁用此功能", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 地理位置資料庫下載錯誤:{}!
    請手動下載並解壓到數據目錄:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "正在下載 GeoLite2 地理位置資料庫 (僅一次,約 20MB )...", + "GeoLite2 City database downloaded!": "GeoLite2 地理位置資料庫已下載!", + + "Are you sure?": "你確定?", + "Site storage limit modified!": "網站存儲限制已變更!", + "Database schema reloaded!": "資料庫架構重新加載!", + "Database rebuilding....": "資料庫重建中...", + "Database rebuilt!": "資料庫已重建!", + "Site updated!": "網站已更新!", + "Delete this site": "刪除此網站", + "File write error: ": "檔案寫入錯誤:", + "Site settings saved!": "網站設置已保存!", + "Enter your private key:": "輸入您的私鑰:", + " Signed!": " 已簽署!", + "WebGL not supported": "不支援 WebGL" +} diff --git a/plugins/Sidebar/languages/zh.json b/plugins/Sidebar/languages/zh.json new file mode 100644 index 000000000..696084cf0 --- /dev/null +++ b/plugins/Sidebar/languages/zh.json @@ -0,0 +1,98 @@ +{ + "Copy to clipboard": "复制到剪切板", + "Peers": "节点数", + "Connected": "已连接", + "Connectable": "可连接", + "Onion": "洋葱点", + "Local": "局域网", + "Connectable peers": "可连接节点", + + "Data transfer": "数据传输", + "Received": "已接收", + "Received bytes": "已接收字节", + "Sent": "已发送", + "Sent bytes": "已发送字节", + + "Files": "文件", + "Save as .zip": "打包成zip文件", + "Total": "总计", + "Image": "图像", + "Other": "其他", + "User data": "用户数据", + + "Size limit": "大小限制", + "limit used": "限额", + "free space": "剩余空间", + "Set": "设置", + + "Optional files": "可选文件", + "Downloaded": "已下载", + "Download and help distribute all files": "下载并帮助分发所有文件", + "Auto download big file size limit": "自动下载大文件大小限制", + "Total size": "总大小", + "Downloaded files": "已下载文件", + + "Database": "数据库", + "search feeds": "搜索数据源", + "{feeds} query": "{feeds} 请求", + "Reload": "重载", + "Rebuild": "重建", + "No database found": "没有找到数据库", + + "Identity address": "身份地址", + "Change": "更改", + + "Site control": "站点控制", + "Update": "更新", + "Pause": "暂停", + "Resume": "恢复", + "Delete": "删除", + "Are you sure?": "您确定吗?", + + "Site address": "站点地址", + "Donate": "捐赠", + + "Needs to be updated": "需要更新", + "{} try": "{} 尝试", + "{} tries": "{} 已尝试", + "+ 
{num_bad_files} more": "+ {num_bad_files} 更多", + + "This is my site": "这是我的站点", + "Site title": "站点标题", + "Site description": "站点描述", + "Save site settings": "保存站点设置", + "Open site directory": "打开所在文件夹", + + "Content publishing": "内容发布", + "Add saved private key": "添加并保存私钥", + "Save": "保存", + "Private key saved.": "私钥已保存", + "Private key saved for site signing": "已保存用于站点签名的私钥", + "Forgot": "删除私钥", + "Saved private key removed": "保存的私钥已删除", + "Choose": "选择", + "Sign": "签名", + "Publish": "发布", + "Sign and publish": "签名并发布", + "This function is disabled on this proxy": "此功能在代理上被禁用", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 地理位置数据库下载错误:{}!
    请手动下载并解压在数据目录:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "正在下载 GeoLite2 地理位置数据库 (仅需一次,约 20MB )...", + "GeoLite2 City database downloaded!": "GeoLite2 地理位置数据库已下载!", + + "Are you sure?": "您确定吗?", + "Site storage limit modified!": "站点存储限制已更改!", + "Database schema reloaded!": "数据库模式已重新加载!", + "Database rebuilding....": "数据库重建中...", + "Database rebuilt!": "数据库已重建!", + "Site updated!": "站点已更新!", + "Delete this site": "删除此站点", + "Blacklist": "黑名单", + "Blacklist this site": "拉黑此站点", + "Reason": "原因", + "Delete and Blacklist": "删除并拉黑", + "File write error: ": "文件写入错误:", + "Site settings saved!": "站点设置已保存!", + "Enter your private key:": "输入您的私钥:", + " Signed!": " 已签名!", + "WebGL not supported": "不支持 WebGL" +} diff --git a/plugins/Sidebar/media-globe/all.js b/plugins/Sidebar/media-globe/all.js index 6d41940f5..5ddc03131 100644 --- a/plugins/Sidebar/media-globe/all.js +++ b/plugins/Sidebar/media-globe/all.js @@ -410,6 +410,7 @@ DAT.Globe = function(container, opts) { } function onMouseWheel(event) { + if (container.style.cursor != "move") return false; event.preventDefault(); if (overRenderer) { if (event.deltaY) { @@ -471,7 +472,6 @@ DAT.Globe = function(container, opts) { function unload() { running = false container.removeEventListener('mousedown', onMouseDown, false); - container.removeEventListener('mousewheel', onMouseWheel, false); if ('onwheel' in document) { container.removeEventListener('wheel', onMouseWheel, false); } else { @@ -526,7 +526,6 @@ DAT.Globe = function(container, opts) { - /* ---- plugins/Sidebar/media-globe/three.min.js ---- */ @@ -1343,4 +1342,4 @@ THREE.MorphBlendMesh.prototype.setAnimationDirectionForward=function(a){if(a=thi THREE.MorphBlendMesh.prototype.setAnimationDuration=function(a,b){var c=this.animationsMap[a];c&&(c.duration=b,c.fps=(c.end-c.start)/c.duration)};THREE.MorphBlendMesh.prototype.setAnimationWeight=function(a,b){var 
c=this.animationsMap[a];c&&(c.weight=b)};THREE.MorphBlendMesh.prototype.setAnimationTime=function(a,b){var c=this.animationsMap[a];c&&(c.time=b)};THREE.MorphBlendMesh.prototype.getAnimationTime=function(a){var b=0;if(a=this.animationsMap[a])b=a.time;return b}; THREE.MorphBlendMesh.prototype.getAnimationDuration=function(a){var b=-1;if(a=this.animationsMap[a])b=a.duration;return b};THREE.MorphBlendMesh.prototype.playAnimation=function(a){var b=this.animationsMap[a];b?(b.time=0,b.active=!0):console.warn("animation["+a+"] undefined")};THREE.MorphBlendMesh.prototype.stopAnimation=function(a){if(a=this.animationsMap[a])a.active=!1}; THREE.MorphBlendMesh.prototype.update=function(a){for(var b=0,c=this.animationsList.length;bd.duration||0>d.time)d.direction*=-1,d.time>d.duration&&(d.time=d.duration,d.directionBackwards=!0),0>d.time&&(d.time=0,d.directionBackwards=!1)}else d.time%=d.duration,0>d.time&&(d.time+=d.duration);var f=d.startFrame+THREE.Math.clamp(Math.floor(d.time/e),0,d.length-1),g=d.weight; -f!==d.currentFrame&&(this.morphTargetInfluences[d.lastFrame]=0,this.morphTargetInfluences[d.currentFrame]=1*g,this.morphTargetInfluences[f]=0,d.lastFrame=d.currentFrame,d.currentFrame=f);e=d.time%e/e;d.directionBackwards&&(e=1-e);this.morphTargetInfluences[d.currentFrame]=e*g;this.morphTargetInfluences[d.lastFrame]=(1-e)*g}}}; \ No newline at end of file +f!==d.currentFrame&&(this.morphTargetInfluences[d.lastFrame]=0,this.morphTargetInfluences[d.currentFrame]=1*g,this.morphTargetInfluences[f]=0,d.lastFrame=d.currentFrame,d.currentFrame=f);e=d.time%e/e;d.directionBackwards&&(e=1-e);this.morphTargetInfluences[d.currentFrame]=e*g;this.morphTargetInfluences[d.lastFrame]=(1-e)*g}}}; diff --git a/plugins/Sidebar/media-globe/globe.js b/plugins/Sidebar/media-globe/globe.js index eab71f9ea..a55237965 100644 --- a/plugins/Sidebar/media-globe/globe.js +++ b/plugins/Sidebar/media-globe/globe.js @@ -321,6 +321,7 @@ DAT.Globe = function(container, opts) { } function onMouseWheel(event) 
{ + if (container.style.cursor != "move") return false; event.preventDefault(); if (overRenderer) { if (event.deltaY) { diff --git a/plugins/Sidebar/media/Internals.coffee b/plugins/Sidebar/media/Internals.coffee new file mode 100644 index 000000000..484ecdb79 --- /dev/null +++ b/plugins/Sidebar/media/Internals.coffee @@ -0,0 +1,60 @@ +class Internals extends Class + constructor: (@sidebar) -> + @tag = null + @opened = false + if window.top.location.hash == "#internals" + setTimeout (=> @open()), 10 + + createHtmltag: -> + @when_loaded = $.Deferred() + if not @container + @container = $(""" +
    +
    + + """) + @container.appendTo(document.body) + @tag = @container.find(".internals") + + open: => + @createHtmltag() + @sidebar.fixbutton_targety = @sidebar.page_height + @stopDragY() + + onOpened: => + @sidebar.onClosed() + @log "onOpened" + + onClosed: => + $(document.body).removeClass("body-internals") + + stopDragY: => + # Animate sidebar and iframe + if @sidebar.fixbutton_targety == @sidebar.fixbutton_inity + # Closed + targety = 0 + @opened = false + else + # Opened + targety = @sidebar.fixbutton_targety - @sidebar.fixbutton_inity + @onOpened() + @opened = true + + # Revent sidebar transitions + if @tag + @tag.css("transition", "0.5s ease-out") + @tag.css("transform", "translateY(#{targety}px)").one transitionEnd, => + @tag.css("transition", "") + if not @opened + @log "cleanup" + # Revert body transformations + @log "stopdrag", "opened:", @opened, targety + if not @opened + @onClosed() + +window.Internals = Internals \ No newline at end of file diff --git a/plugins/Sidebar/media/Internals.css b/plugins/Sidebar/media/Internals.css new file mode 100644 index 000000000..36b2489e0 --- /dev/null +++ b/plugins/Sidebar/media/Internals.css @@ -0,0 +1,17 @@ +.internals-container { width: 100%; z-index: 998; position: absolute; top: -100vh; } +.internals { background-color: #EEE; height: 100vh; transform: translateY(0px); } +.internals-middle {height: 0px; top: 50%; position: absolute; width: 100%; left: 50%; } + +.internals .mynode { + border: 0.5px solid #aaa; width: 50px; height: 50px; transform: rotateZ(45deg); margin-top: -25px; margin-left: -25px; + opacity: 1; display: inline-block; background-color: #EEE; z-index: 9; position: absolute; outline: 5px solid #EEE; +} +.internals .peers { width: 0px; height: 0px; position: absolute; left: -20px; top: -20px; text-align: center; } +.internals .peer { left: 0px; top: 0px; position: absolute; } +.internals .peer .icon { width: 20px; height: 20px; padding: 10px; display: inline-block; text-decoration: none; left: 
200px; position: absolute; color: #666; } +.internals .peer .icon:before { content: "\25BC"; position: absolute; margin-top: 3px; margin-left: -1px; opacity: 0; transition: all 0.3s } +.internals .peer .icon:hover:before { opacity: 1; transition: none } +.internals .peer .line { + width: 187px; border-top: 1px solid #CCC; position: absolute; top: 20px; left: 20px; + transform: rotateZ(334deg); transform-origin: bottom left; +} \ No newline at end of file diff --git a/plugins/Sidebar/media/Menu.coffee b/plugins/Sidebar/media/Menu.coffee new file mode 100644 index 000000000..3e19fd9f0 --- /dev/null +++ b/plugins/Sidebar/media/Menu.coffee @@ -0,0 +1,49 @@ +class Menu + constructor: (@button) -> + @elem = $(".menu.template").clone().removeClass("template") + @elem.appendTo("body") + @items = [] + + show: -> + if window.visible_menu and window.visible_menu.button[0] == @button[0] # Same menu visible then hide it + window.visible_menu.hide() + @hide() + else + button_pos = @button.offset() + left = button_pos.left + @elem.css({"top": button_pos.top+@button.outerHeight(), "left": left}) + @button.addClass("menu-active") + @elem.addClass("visible") + if @elem.position().left + @elem.width() + 20 > window.innerWidth + @elem.css("left", window.innerWidth - @elem.width() - 20) + if window.visible_menu then window.visible_menu.hide() + window.visible_menu = @ + + + hide: -> + @elem.removeClass("visible") + @button.removeClass("menu-active") + window.visible_menu = null + + + addItem: (title, cb) -> + item = $(".menu-item.template", @elem).clone().removeClass("template") + item.html(title) + item.on "click", => + if not cb(item) + @hide() + return false + item.appendTo(@elem) + @items.push item + return item + + + log: (args...) -> + console.log "[Menu]", args... 
+ +window.Menu = Menu + +# Hide menu on outside click +$("body").on "click", (e) -> + if window.visible_menu and e.target != window.visible_menu.button[0] and $(e.target).parent()[0] != window.visible_menu.elem[0] + window.visible_menu.hide() diff --git a/plugins/Sidebar/media/Menu.css b/plugins/Sidebar/media/Menu.css new file mode 100644 index 000000000..e2afa16eb --- /dev/null +++ b/plugins/Sidebar/media/Menu.css @@ -0,0 +1,19 @@ +.menu { + background-color: white; padding: 10px 0px; position: absolute; top: 0px; left: 0px; max-height: 0px; overflow: hidden; transform: translate(0px, -30px); pointer-events: none; + box-shadow: 0px 2px 8px rgba(0,0,0,0.3); border-radius: 2px; opacity: 0; transition: opacity 0.2s ease-out, transform 1s ease-out, max-height 0.2s ease-in-out; +} +.menu.visible { opacity: 1; max-height: 350px; transform: translate(0px, 0px); transition: opacity 0.1s ease-out, transform 0.3s ease-out, max-height 0.3s ease-in-out; pointer-events: all } + +.menu-item { display: block; text-decoration: none; color: black; padding: 6px 24px; transition: all 0.2s; border-bottom: none; font-weight: normal; padding-left: 30px; } +.menu-item-separator { margin-top: 5px; border-top: 1px solid #eee } + +.menu-item:hover { background-color: #F6F6F6; transition: none; color: inherit; border: none } +.menu-item:active, .menu-item:focus { background-color: #AF3BFF; color: white; transition: none } +.menu-item.selected:before { + content: "L"; display: inline-block; transform: rotateZ(45deg) scaleX(-1); + font-weight: bold; position: absolute; margin-left: -17px; font-size: 12px; margin-top: 2px; +} + +@media only screen and (max-width: 800px) { +.menu, .menu.visible { position: absolute; left: unset !important; right: 20px; } +} \ No newline at end of file diff --git a/plugins/Sidebar/media/Scrollbable.css b/plugins/Sidebar/media/Scrollbable.css index b11faea00..6e3e0b6a1 100644 --- a/plugins/Sidebar/media/Scrollbable.css +++ b/plugins/Sidebar/media/Scrollbable.css 
@@ -25,7 +25,7 @@ position: absolute; width: 7px; border-radius: 5px; - background: #151515; + background: #3A3A3A; top: 0px; left: 395px; -webkit-transition: top .08s; @@ -41,4 +41,4 @@ -moz-user-select: none; -ms-user-select: none; user-select: none; -} \ No newline at end of file +} diff --git a/plugins/Sidebar/media/Sidebar.coffee b/plugins/Sidebar/media/Sidebar.coffee index 9b3b7a256..938fe41be 100644 --- a/plugins/Sidebar/media/Sidebar.coffee +++ b/plugins/Sidebar/media/Sidebar.coffee @@ -1,21 +1,26 @@ class Sidebar extends Class - constructor: -> + constructor: (@wrapper) -> @tag = null @container = null @opened = false @width = 410 + @internals = new Internals(@) @fixbutton = $(".fixbutton") @fixbutton_addx = 0 + @fixbutton_addy = 0 @fixbutton_initx = 0 + @fixbutton_inity = 15 @fixbutton_targetx = 0 + @move_lock = null @page_width = $(window).width() + @page_height = $(window).height() @frame = $("#inner-iframe") @initFixbutton() @dragStarted = 0 @globe = null @preload_html = null - @original_set_site_info = wrapper.setSiteInfo # We going to override this, save the original + @original_set_site_info = @wrapper.setSiteInfo # We going to override this, save the original # Start in opened state for debugging if false @@ -26,39 +31,39 @@ class Sidebar extends Class initFixbutton: -> - ### - @fixbutton.on "mousedown touchstart", (e) => - if not @opened - @logStart("Preloading") - wrapper.ws.cmd "sidebarGetHtmlTag", {}, (res) => - @logEnd("Preloading") - @preload_html = res - ### # Detect dragging @fixbutton.on "mousedown touchstart", (e) => + if e.button > 0 # Right or middle click + return e.preventDefault() # Disable previous listeners - @fixbutton.off "click touchstop touchcancel" + @fixbutton.off "click touchend touchcancel" @fixbutton.off "mousemove touchmove" # Make sure its not a click @dragStarted = (+ new Date) @fixbutton.one "mousemove touchmove", (e) => mousex = e.pageX + mousey = e.pageY if not mousex mousex = e.originalEvent.touches[0].pageX + 
mousey = e.originalEvent.touches[0].pageY - @fixbutton_addx = @fixbutton.offset().left-mousex + @fixbutton_addx = @fixbutton.offset().left - mousex + @fixbutton_addy = @fixbutton.offset().top - mousey @startDrag() - @fixbutton.parent().on "click touchstop touchcancel", (e) => + @fixbutton.parent().on "click touchend touchcancel", (e) => + if (+ new Date) - @dragStarted < 100 + window.top.location = @fixbutton.find(".fixbutton-bg").attr("href") @stopDrag() @resized() $(window).on "resize", @resized resized: => @page_width = $(window).width() + @page_height = $(window).height() @fixbutton_initx = @page_width - 75 # Initial x position if @opened @fixbutton.css @@ -69,6 +74,7 @@ class Sidebar extends Class # Start dragging the fixbutton startDrag: -> + @move_lock = "x" # Temporary until internals not finished @log "startDrag" @fixbutton_targetx = @fixbutton_initx # Fallback x position @@ -85,7 +91,9 @@ class Sidebar extends Class @fixbutton.one "click", (e) => @stopDrag() @fixbutton.removeClass("dragging") - if Math.abs(@fixbutton.offset().left - @fixbutton_initx) > 5 + moved_x = Math.abs(@fixbutton.offset().left - @fixbutton_initx) + moved_y = Math.abs(@fixbutton.offset().top - @fixbutton_inity) + if moved_x > 5 or moved_y > 10 # If moved more than some pixel the button then don't go to homepage e.preventDefault() @@ -94,32 +102,57 @@ class Sidebar extends Class @fixbutton.parents().on "mousemove touchmove" ,@waitMove # Stop dragging listener - @fixbutton.parents().on "mouseup touchstop touchend touchcancel", (e) => + @fixbutton.parents().one "mouseup touchend touchcancel", (e) => e.preventDefault() @stopDrag() # Wait for moving the fixbutton waitMove: (e) => - if Math.abs(@fixbutton.offset().left - @fixbutton_targetx) > 10 and (+ new Date)-@dragStarted > 100 - @moved() + document.body.style.perspective = "1000px" + document.body.style.height = "100%" + document.body.style.willChange = "perspective" + document.documentElement.style.height = "100%" + 
#$(document.body).css("backface-visibility", "hidden").css("perspective", "1000px").css("height", "900px") + # $("iframe").css("backface-visibility", "hidden") + + moved_x = Math.abs(parseInt(@fixbutton[0].style.left) - @fixbutton_targetx) + moved_y = Math.abs(parseInt(@fixbutton[0].style.top) - @fixbutton_targety) + if moved_x > 5 and (+ new Date) - @dragStarted + moved_x > 50 + @moved("x") + @fixbutton.stop().animate {"top": @fixbutton_inity}, 1000 + @fixbutton.parents().off "mousemove touchmove" ,@waitMove + + else if moved_y > 5 and (+ new Date) - @dragStarted + moved_y > 50 + @moved("y") @fixbutton.parents().off "mousemove touchmove" ,@waitMove - moved: -> - @log "Moved" + moved: (direction) -> + @log "Moved", direction + @move_lock = direction + if direction == "y" + $(document.body).addClass("body-internals") + return @internals.createHtmltag() @createHtmltag() - $(document.body).css("perspective", "1000px").addClass("body-sidebar") + $(document.body).addClass("body-sidebar") + @container.on "mousedown touchend touchcancel", (e) => + if e.target != e.currentTarget + return true + @log "closing" + if $(document.body).hasClass("body-sidebar") + @close() + return true + $(window).off "resize" $(window).on "resize", => $(document.body).css "height", $(window).height() @scrollable() @resized() - $(window).trigger "resize" # Override setsiteinfo to catch changes - wrapper.setSiteInfo = (site_info) => + @wrapper.setSiteInfo = (site_info) => @setSiteInfo(site_info) - @original_set_site_info.apply(wrapper, arguments) + @original_set_site_info.apply(@wrapper, arguments) # Preload world.jpg img = new Image(); @@ -150,7 +183,7 @@ class Sidebar extends Class @setHtmlTag(@preload_html) @preload_html = null else - wrapper.ws.cmd "sidebarGetHtmlTag", {}, @setHtmlTag + @wrapper.ws.cmd "sidebarGetHtmlTag", {}, @setHtmlTag setHtmlTag: (res) => if @tag.find(".content").children().length == 0 # First update @@ -161,7 +194,6 @@ class Sidebar extends Class @when_loaded.resolve() 
else # Not first update, patch the html to keep unchanged dom elements - @log "Patching content" morphdom @tag.find(".content")[0], '
    '+res+'
    ', { onBeforeMorphEl: (from_el, to_el) -> # Ignore globe loaded state if from_el.className == "globe" or from_el.className.indexOf("noupdate") >= 0 @@ -170,22 +202,57 @@ class Sidebar extends Class return true } + # Save and forgot privatekey for site signing + @tag.find("#privatekey-add").off("click, touchend").on "click touchend", (e) => + @wrapper.displayPrompt "Enter your private key:", "password", "Save", "", (privatekey) => + @wrapper.ws.cmd "userSetSitePrivatekey", [privatekey], (res) => + @wrapper.notifications.add "privatekey", "done", "Private key saved for site signing", 5000 + return false + + @tag.find("#privatekey-forgot").off("click, touchend").on "click touchend", (e) => + @wrapper.displayConfirm "Remove saved private key for this site?", "Forgot", (res) => + if not res + return false + @wrapper.ws.cmd "userSetSitePrivatekey", [""], (res) => + @wrapper.notifications.add "privatekey", "done", "Saved private key removed", 5000 + return false + + animDrag: (e) => mousex = e.pageX - if not mousex + mousey = e.pageY + if not mousex and e.originalEvent.touches mousex = e.originalEvent.touches[0].pageX + mousey = e.originalEvent.touches[0].pageY - overdrag = @fixbutton_initx-@width-mousex + overdrag = @fixbutton_initx - @width - mousex if overdrag > 0 # Overdragged - overdrag_percent = 1+overdrag/300 + overdrag_percent = 1 + overdrag/300 mousex = (mousex + (@fixbutton_initx-@width)*overdrag_percent)/(1+overdrag_percent) - targetx = @fixbutton_initx-mousex-@fixbutton_addx - - @fixbutton[0].style.left = (mousex+@fixbutton_addx)+"px" - - if @tag - @tag[0].style.transform = "translateX(#{0-targetx}px)" + targetx = @fixbutton_initx - mousex - @fixbutton_addx + targety = @fixbutton_inity - mousey - @fixbutton_addy + + if @move_lock == "x" + targety = @fixbutton_inity + else if @move_lock == "y" + targetx = @fixbutton_initx + + if not @move_lock or @move_lock == "x" + @fixbutton[0].style.left = (mousex + @fixbutton_addx) + "px" + if @tag + 
@tag[0].style.transform = "translateX(#{0 - targetx}px)" + + if not @move_lock or @move_lock == "y" + @fixbutton[0].style.top = (mousey + @fixbutton_addy) + "px" + if @internals.tag + @internals.tag[0].style.transform = "translateY(#{0 - targety}px)" + + #if @move_lock == "x" + # @fixbutton[0].style.left = "#{@fixbutton_targetx} px" + #@fixbutton[0].style.top = "#{@fixbutton_inity}px" + #if @move_lock == "y" + # @fixbutton[0].style.top = "#{@fixbutton_targety} px" # Check if opened if (not @opened and targetx > @width/3) or (@opened and targetx > @width*0.9) @@ -193,6 +260,11 @@ class Sidebar extends Class else @fixbutton_targetx = @fixbutton_initx + if (not @internals.opened and 0 - targety > @page_height/10) or (@internals.opened and 0 - targety > @page_height*0.95) + @fixbutton_targety = @page_height - @fixbutton_inity - 50 + else + @fixbutton_targety = @fixbutton_inity + # Stop dragging the fixbutton stopDrag: -> @@ -207,42 +279,57 @@ class Sidebar extends Class # Move back to initial position if @fixbutton_targetx != @fixbutton.offset().left # Animate fixbutton - @fixbutton.stop().animate {"left": @fixbutton_targetx}, 500, "easeOutBack", => + if @move_lock == "y" + top = @fixbutton_targety + left = @fixbutton_initx + if @move_lock == "x" + top = @fixbutton_inity + left = @fixbutton_targetx + @fixbutton.stop().animate {"left": left, "top": top}, 500, "easeOutBack", => # Switch back to auto align if @fixbutton_targetx == @fixbutton_initx # Closed @fixbutton.css("left", "auto") else # Opened - @fixbutton.css("left", @fixbutton_targetx) + @fixbutton.css("left", left) $(".fixbutton-bg").trigger "mouseout" # Switch fixbutton back to normal status - # Animate sidebar and iframe - if @fixbutton_targetx == @fixbutton_initx - # Closed - targetx = 0 - @opened = false + @stopDragX() + @internals.stopDragY() + @move_lock = null + + stopDragX: -> + # Animate sidebar and iframe + if @fixbutton_targetx == @fixbutton_initx or @move_lock == "y" + # Closed + targetx = 0 + 
@opened = false + else + # Opened + targetx = @width + if @opened + @onOpened() else - # Opened - targetx = @width - if not @opened - @when_loaded.done => - @onOpened() - @opened = true + @when_loaded.done => + @onOpened() + @opened = true - # Revent sidebar transitions + # Revent sidebar transitions + if @tag @tag.css("transition", "0.4s ease-out") @tag.css("transform", "translateX(-#{targetx}px)").one transitionEnd, => @tag.css("transition", "") if not @opened @container.remove() @container = null - @tag.remove() - @tag = null + if @tag + @tag.remove() + @tag = null - # Revert body transformations - @log "stopdrag", "opened:", @opened - if not @opened - @onClosed() + # Revert body transformations + @log "stopdrag", "opened:", @opened + if not @opened + @onClosed() onOpened: -> @@ -250,139 +337,242 @@ class Sidebar extends Class @scrollable() # Re-calculate height when site admin opened or closed - @tag.find("#checkbox-owned").off("click").on "click", => + @tag.find("#checkbox-owned, #checkbox-autodownloadoptional").off("click touchend").on "click touchend", => setTimeout (=> @scrollable() ), 300 # Site limit button - @tag.find("#button-sitelimit").off("click").on "click", => - wrapper.ws.cmd "siteSetLimit", $("#input-sitelimit").val(), => - wrapper.notifications.add "done-sitelimit", "done", "Site storage limit modified!", 5000 + @tag.find("#button-sitelimit").off("click touchend").on "click touchend", => + @wrapper.ws.cmd "siteSetLimit", $("#input-sitelimit").val(), (res) => + if res == "ok" + @wrapper.notifications.add "done-sitelimit", "done", "Site storage limit modified!", 5000 + @updateHtmlTag() + return false + + # Site autodownload limit button + @tag.find("#button-autodownload_bigfile_size_limit").off("click touchend").on "click touchend", => + @wrapper.ws.cmd "siteSetAutodownloadBigfileLimit", $("#input-autodownload_bigfile_size_limit").val(), (res) => + if res == "ok" + @wrapper.notifications.add "done-bigfilelimit", "done", "Site bigfile auto download 
limit modified!", 5000 @updateHtmlTag() return false # Database reload - @tag.find("#button-dbreload").off("click").on "click", => - wrapper.ws.cmd "dbReload", [], => - wrapper.notifications.add "done-dbreload", "done", "Database schema reloaded", 5000 + @tag.find("#button-dbreload").off("click touchend").on "click touchend", => + @wrapper.ws.cmd "dbReload", [], => + @wrapper.notifications.add "done-dbreload", "done", "Database schema reloaded!", 5000 @updateHtmlTag() return false # Database rebuild - @tag.find("#button-dbrebuild").off("click").on "click", => - wrapper.notifications.add "done-dbrebuild", "info", "Database rebuilding...." - wrapper.ws.cmd "dbRebuild", [], => - wrapper.notifications.add "done-dbrebuild", "done", "Database rebuilt!", 5000 + @tag.find("#button-dbrebuild").off("click touchend").on "click touchend", => + @wrapper.notifications.add "done-dbrebuild", "info", "Database rebuilding...." + @wrapper.ws.cmd "dbRebuild", [], => + @wrapper.notifications.add "done-dbrebuild", "done", "Database rebuilt!", 5000 @updateHtmlTag() return false # Update site - @tag.find("#button-update").off("click").on "click", => + @tag.find("#button-update").off("click touchend").on "click touchend", => @tag.find("#button-update").addClass("loading") - wrapper.ws.cmd "siteUpdate", wrapper.site_info.address, => - wrapper.notifications.add "done-updated", "done", "Site updated!", 5000 + @wrapper.ws.cmd "siteUpdate", @wrapper.site_info.address, => + @wrapper.notifications.add "done-updated", "done", "Site updated!", 5000 @tag.find("#button-update").removeClass("loading") return false # Pause site - @tag.find("#button-pause").off("click").on "click", => + @tag.find("#button-pause").off("click touchend").on "click touchend", => @tag.find("#button-pause").addClass("hidden") - wrapper.ws.cmd "sitePause", wrapper.site_info.address + @wrapper.ws.cmd "sitePause", @wrapper.site_info.address return false # Resume site - @tag.find("#button-resume").off("click").on "click", => + 
@tag.find("#button-resume").off("click touchend").on "click touchend", => @tag.find("#button-resume").addClass("hidden") - wrapper.ws.cmd "siteResume", wrapper.site_info.address + @wrapper.ws.cmd "siteResume", @wrapper.site_info.address return false # Delete site - @tag.find("#button-delete").off("click").on "click", => - wrapper.displayConfirm "Are you sure?", "Delete this site", => - @tag.find("#button-delete").addClass("loading") - wrapper.ws.cmd "siteDelete", wrapper.site_info.address, -> - document.location = $(".fixbutton-bg").attr("href") + @tag.find("#button-delete").off("click touchend").on "click touchend", => + @wrapper.displayConfirm "Are you sure?", ["Delete this site", "Blacklist"], (confirmed) => + if confirmed == 1 + @tag.find("#button-delete").addClass("loading") + @wrapper.ws.cmd "siteDelete", @wrapper.site_info.address, -> + document.location = $(".fixbutton-bg").attr("href") + else if confirmed == 2 + @wrapper.displayPrompt "Blacklist this site", "text", "Delete and Blacklist", "Reason", (reason) => + @tag.find("#button-delete").addClass("loading") + @wrapper.ws.cmd "siteblockAdd", [@wrapper.site_info.address, reason] + @wrapper.ws.cmd "siteDelete", @wrapper.site_info.address, -> + document.location = $(".fixbutton-bg").attr("href") + + return false # Owned checkbox - @tag.find("#checkbox-owned").off("click").on "click", => - wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")] + @tag.find("#checkbox-owned").off("click touchend").on "click touchend", => + @wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")] # Owned checkbox - @tag.find("#checkbox-autodownloadoptional").off("click").on "click", => - wrapper.ws.cmd "siteSetAutodownloadoptional", [@tag.find("#checkbox-autodownloadoptional").is(":checked")] + @tag.find("#checkbox-autodownloadoptional").off("click touchend").on "click touchend", => + @wrapper.ws.cmd "siteSetAutodownloadoptional", 
[@tag.find("#checkbox-autodownloadoptional").is(":checked")] # Change identity button - @tag.find("#button-identity").off("click").on "click", => - wrapper.ws.cmd "certSelect" + @tag.find("#button-identity").off("click touchend").on "click touchend", => + @wrapper.ws.cmd "certSelect" return false - # Owned checkbox - @tag.find("#checkbox-owned").off("click").on "click", => - wrapper.ws.cmd "siteSetOwned", [@tag.find("#checkbox-owned").is(":checked")] - # Save settings - @tag.find("#button-settings").off("click").on "click", => - wrapper.ws.cmd "fileGet", "content.json", (res) => + @tag.find("#button-settings").off("click touchend").on "click touchend", => + @wrapper.ws.cmd "fileGet", "content.json", (res) => data = JSON.parse(res) data["title"] = $("#settings-title").val() data["description"] = $("#settings-description").val() json_raw = unescape(encodeURIComponent(JSON.stringify(data, undefined, '\t'))) - wrapper.ws.cmd "fileWrite", ["content.json", btoa(json_raw)], (res) => + @wrapper.ws.cmd "fileWrite", ["content.json", btoa(json_raw), true], (res) => if res != "ok" # fileWrite failed - wrapper.notifications.add "file-write", "error", "File write error: #{res}" + @wrapper.notifications.add "file-write", "error", "File write error: #{res}" else - wrapper.notifications.add "file-write", "done", "Site settings saved!", 5000 + @wrapper.notifications.add "file-write", "done", "Site settings saved!", 5000 + if @wrapper.site_info.privatekey + @wrapper.ws.cmd "siteSign", {privatekey: "stored", inner_path: "content.json", update_changed_files: true} @updateHtmlTag() return false - # Sign content.json - @tag.find("#button-sign").off("click").on "click", => + + # Open site directory + @tag.find("#link-directory").off("click touchend").on "click touchend", => + @wrapper.ws.cmd "serverShowdirectory", ["site", @wrapper.site_info.address] + return false + + # Copy site with peers + @tag.find("#link-copypeers").off("click touchend").on "click touchend", (e) => + copy_text = 
e.currentTarget.href + handler = (e) => + e.clipboardData.setData('text/plain', copy_text) + e.preventDefault() + @wrapper.notifications.add "copy", "done", "Site address with peers copied to your clipboard", 5000 + document.removeEventListener('copy', handler, true) + + document.addEventListener('copy', handler, true) + document.execCommand('copy') + return false + + # Sign and publish content.json + $(document).on "click touchend", => + @tag?.find("#button-sign-publish-menu").removeClass("visible") + @tag?.find(".contents + .flex").removeClass("sign-publish-flex") + + @tag.find(".contents-content").off("click touchend").on "click touchend", (e) => + $("#input-contents").val(e.currentTarget.innerText); + return false; + + menu = new Menu(@tag.find("#menu-sign-publish")) + menu.elem.css("margin-top", "-130px") # Open upwards + menu.addItem "Sign", => inner_path = @tag.find("#input-contents").val() - if wrapper.site_info.privatekey - # Privatekey stored in users.json - wrapper.ws.cmd "siteSign", {privatekey: "stored", inner_path: inner_path, update_changed_files: true}, (res) => - wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000 + @wrapper.ws.cmd "fileRules", {inner_path: inner_path}, (res) => + if @wrapper.site_info.privatekey + # Privatekey stored in users.json + @wrapper.ws.cmd "siteSign", {privatekey: "stored", inner_path: inner_path, update_changed_files: true}, (res) => + if res == "ok" + @wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000 + else if @wrapper.site_info.auth_address in res.signers + # ZeroID or other ID provider + @wrapper.ws.cmd "siteSign", {privatekey: null, inner_path: inner_path, update_changed_files: true}, (res) => + if res == "ok" + @wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000 + else + # Ask the user for privatekey + @wrapper.displayPrompt "Enter your private key:", "password", "Sign", "", (privatekey) => # Prompt the private key + @wrapper.ws.cmd "siteSign", 
{privatekey: privatekey, inner_path: inner_path, update_changed_files: true}, (res) => + if res == "ok" + @wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000 + + @tag.find(".contents + .flex").removeClass "active" + menu.hide() + + menu.addItem "Publish", => + inner_path = @tag.find("#input-contents").val() + @wrapper.ws.cmd "sitePublish", {"inner_path": inner_path, "sign": false} + @tag.find(".contents + .flex").removeClass "active" + menu.hide() + + @tag.find("#menu-sign-publish").off("click touchend").on "click touchend", => + if window.visible_menu == menu + @tag.find(".contents + .flex").removeClass "active" + menu.hide() else - # Ask the user for privatekey - wrapper.displayPrompt "Enter your private key:", "password", "Sign", (privatekey) => # Prompt the private key - wrapper.ws.cmd "siteSign", {privatekey: privatekey, inner_path: inner_path, update_changed_files: true}, (res) => + @tag.find(".contents + .flex").addClass "active" + @tag.find(".content-wrapper").prop "scrollTop", 10000 + menu.show() + return false + + $("body").on "click", => + if @tag + @tag.find(".contents + .flex").removeClass "active" + + @tag.find("#button-sign-publish").off("click touchend").on "click touchend", => + inner_path = @tag.find("#input-contents").val() + + @wrapper.ws.cmd "fileRules", {inner_path: inner_path}, (res) => + if @wrapper.site_info.privatekey + # Privatekey stored in users.json + @wrapper.ws.cmd "sitePublish", {privatekey: "stored", inner_path: inner_path, sign: true, update_changed_files: true}, (res) => + if res == "ok" + @wrapper.notifications.add "sign", "done", "#{inner_path} Signed and published!", 5000 + else if @wrapper.site_info.auth_address in res.signers + # ZeroID or other ID provider + @wrapper.ws.cmd "sitePublish", {privatekey: null, inner_path: inner_path, sign: true, update_changed_files: true}, (res) => if res == "ok" - wrapper.notifications.add "sign", "done", "#{inner_path} Signed!", 5000 + @wrapper.notifications.add "sign", 
"done", "#{inner_path} Signed and published!", 5000 + else + # Ask the user for privatekey + @wrapper.displayPrompt "Enter your private key:", "password", "Sign", "", (privatekey) => # Prompt the private key + @wrapper.ws.cmd "sitePublish", {privatekey: privatekey, inner_path: inner_path, sign: true, update_changed_files: true}, (res) => + if res == "ok" + @wrapper.notifications.add "sign", "done", "#{inner_path} Signed and published!", 5000 return false - # Publish content.json - @tag.find("#button-publish").off("click").on "click", => - inner_path = @tag.find("#input-contents").val() - @tag.find("#button-publish").addClass "loading" - wrapper.ws.cmd "sitePublish", {"inner_path": inner_path, "sign": false}, => - @tag.find("#button-publish").removeClass "loading" + # Close + @tag.find(".close").off("click touchend").on "click touchend", (e) => + @close() + return false @loadGlobe() + close: -> + @move_lock = "x" + @startDrag() + @stopDrag() + onClosed: -> $(window).off "resize" $(window).on "resize", @resized $(document.body).css("transition", "0.6s ease-in-out").removeClass("body-sidebar").on transitionEnd, (e) => - if e.target == document.body - $(document.body).css("height", "auto").css("perspective", "").css("transition", "").off transitionEnd + if e.target == document.body and not $(document.body).hasClass("body-sidebar") and not $(document.body).hasClass("body-internals") + $(document.body).css("height", "auto").css("perspective", "").css("will-change", "").css("transition", "").off transitionEnd @unloadGlobe() # We dont need site info anymore - wrapper.setSiteInfo = @original_set_site_info + @wrapper.setSiteInfo = @original_set_site_info loadGlobe: => - console.log "loadGlobe", @tag.find(".globe").hasClass("loading") + console.log "loadGlobe", @tag.find(".globe")[0], @tag.find(".globe").hasClass("loading") if @tag.find(".globe").hasClass("loading") setTimeout (=> if typeof(DAT) == "undefined" # Globe script not loaded, do it first - 
$.getScript("/uimedia/globe/all.js", @displayGlobe) + script_tag = $(" + + diff --git a/plugins/UiConfig/media/css/Config.css b/plugins/UiConfig/media/css/Config.css new file mode 100644 index 000000000..98291d338 --- /dev/null +++ b/plugins/UiConfig/media/css/Config.css @@ -0,0 +1,68 @@ +body { background-color: #EDF2F5; font-family: Roboto, 'Segoe UI', Arial, 'Helvetica Neue'; margin: 0px; padding: 0px; backface-visibility: hidden; } +h1, h2, h3, h4 { font-family: 'Roboto', Arial, sans-serif; font-weight: 200; font-size: 30px; margin: 0px; padding: 0px } +h2 { margin-top: 10px; } +h3 { font-weight: normal } +h1 { background: linear-gradient(33deg,#af3bff,#0d99c9); color: white; padding: 16px 30px; } +a { color: #9760F9 } +a:hover { text-decoration: none } + +.link { background-color: transparent; outline: 5px solid transparent; transition: all 0.3s } +.link:active { background-color: #EFEFEF; outline: 5px solid #EFEFEF; transition: none } + +.content { max-width: 800px; margin: auto; background-color: white; padding: 60px 20px; box-sizing: border-box; padding-bottom: 150px; } +.section { margin: 0px 10%; } +.config-items { font-size: 19px; margin-top: 25px; margin-bottom: 75px; } +.config-item { transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); position: relative; padding-bottom: 20px; padding-top: 10px; } +.config-item.hidden { opacity: 0; height: 0px; padding: 0px; } +.config-item .title { display: inline-block; line-height: 36px; } +.config-item .title h3 { font-size: 20px; font-weight: lighter; margin-right: 100px; } +.config-item .description { font-size: 14px; color: #666; line-height: 24px; } +.config-item .value { display: inline-block; white-space: nowrap; } +.config-item .value-right { right: 0px; position: absolute; } +.config-item .value-fullwidth { width: 100% } +.config-item .marker { + font-weight: bold; text-decoration: none; font-size: 25px; position: absolute; padding: 2px 15px; line-height: 32px; + opacity: 0; pointer-events: none; 
transition: all 0.6s; transform: scale(2); color: #9760F9; +} +.config-item .marker.visible { opacity: 1; pointer-events: all; transform: scale(1); } +.config-item .marker.changed { color: #2ecc71; } +.config-item .marker.pending { color: #ffa200; } + + +.input-text, .input-select { padding: 8px 18px; border: 1px solid #CCC; border-radius: 3px; font-size: 17px; box-sizing: border-box; } +.input-text:focus, .input-select:focus { border: 1px solid #3396ff; outline: none; } +.input-textarea { overflow-x: auto; overflow-y: hidden; white-space: pre; line-height: 22px; } + +.input-select { width: initial; font-size: 14px; padding-right: 10px; padding-left: 10px; } + +.value-right .input-text { text-align: right; width: 100px; } +.value-fullwidth .input-text { width: 100%; font-size: 14px; font-family: 'Segoe UI', Arial, 'Helvetica Neue'; } +.value-fullwidth { margin-top: 10px; } + +/* Checkbox */ +.checkbox-skin { background-color: #CCC; width: 50px; height: 24px; border-radius: 15px; transition: all 0.3s ease-in-out; display: inline-block; } +.checkbox-skin:before { + content: ""; position: relative; width: 20px; background-color: white; height: 20px; display: block; border-radius: 100%; margin-top: 2px; margin-left: 2px; + transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); +} +.checkbox { font-size: 14px; font-weight: normal; display: inline-block; cursor: pointer; margin-top: 5px; } +.checkbox .title { display: inline; line-height: 30px; vertical-align: 4px; margin-left: 11px } +.checkbox.checked .checkbox-skin:before { margin-left: 27px; } +.checkbox.checked .checkbox-skin { background-color: #2ECC71 } + +/* Bottom */ + +.bottom { + width: 100%; text-align: center; background-color: #ffffffde; padding: 25px; bottom: -120px; + transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); position: fixed; backface-visibility: hidden; box-sizing: border-box; +} +.bottom-content { max-width: 750px; width: 100%; margin: 0px auto; } +.bottom .button { float: right; } 
+.bottom.visible { bottom: 0px; box-shadow: 0px 0px 35px #dcdcdc; } +.bottom .title { padding: 10px 10px; color: #363636; float: left; text-transform: uppercase; letter-spacing: 1px; } +.bottom .title:before { content: "•"; display: inline-block; color: #2ecc71; font-size: 31px; vertical-align: -7px; margin-right: 8px; line-height: 25px; } +.bottom-restart .title:before { color: #ffa200; } + +.animate { transition: all 0.3s ease-out !important; } +.animate-back { transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important; } +.animate-inout { transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important; } \ No newline at end of file diff --git a/plugins/UiConfig/media/css/all.css b/plugins/UiConfig/media/css/all.css new file mode 100644 index 000000000..7bb0087aa --- /dev/null +++ b/plugins/UiConfig/media/css/all.css @@ -0,0 +1,125 @@ + + +/* ---- plugins/UiConfig/media/css/Config.css ---- */ + + +body { background-color: #EDF2F5; font-family: Roboto, 'Segoe UI', Arial, 'Helvetica Neue'; margin: 0px; padding: 0px; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; } +h1, h2, h3, h4 { font-family: 'Roboto', Arial, sans-serif; font-weight: 200; font-size: 30px; margin: 0px; padding: 0px } +h2 { margin-top: 10px; } +h3 { font-weight: normal } +h1 { background: -webkit-linear-gradient(33deg,#af3bff,#0d99c9);background: -moz-linear-gradient(33deg,#af3bff,#0d99c9);background: -o-linear-gradient(33deg,#af3bff,#0d99c9);background: -ms-linear-gradient(33deg,#af3bff,#0d99c9);background: linear-gradient(33deg,#af3bff,#0d99c9); color: white; padding: 16px 30px; } +a { color: #9760F9 } +a:hover { text-decoration: none } + +.link { background-color: transparent; outline: 5px solid transparent; -webkit-transition: all 0.3s ; -moz-transition: all 0.3s ; -o-transition: all 0.3s ; -ms-transition: all 0.3s ; transition: all 0.3s } +.link:active { 
background-color: #EFEFEF; outline: 5px solid #EFEFEF; -webkit-transition: none ; -moz-transition: none ; -o-transition: none ; -ms-transition: none ; transition: none } + +.content { max-width: 800px; margin: auto; background-color: white; padding: 60px 20px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; padding-bottom: 150px; } +.section { margin: 0px 10%; } +.config-items { font-size: 19px; margin-top: 25px; margin-bottom: 75px; } +.config-item { -webkit-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -moz-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -o-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -ms-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1) ; position: relative; padding-bottom: 20px; padding-top: 10px; } +.config-item.hidden { opacity: 0; height: 0px; padding: 0px; } +.config-item .title { display: inline-block; line-height: 36px; } +.config-item .title h3 { font-size: 20px; font-weight: lighter; margin-right: 100px; } +.config-item .description { font-size: 14px; color: #666; line-height: 24px; } +.config-item .value { display: inline-block; white-space: nowrap; } +.config-item .value-right { right: 0px; position: absolute; } +.config-item .value-fullwidth { width: 100% } +.config-item .marker { + font-weight: bold; text-decoration: none; font-size: 25px; position: absolute; padding: 2px 15px; line-height: 32px; + opacity: 0; pointer-events: none; -webkit-transition: all 0.6s; -moz-transition: all 0.6s; -o-transition: all 0.6s; -ms-transition: all 0.6s; transition: all 0.6s ; -webkit-transform: scale(2); -moz-transform: scale(2); -o-transform: scale(2); -ms-transform: scale(2); transform: scale(2) ; color: #9760F9; +} +.config-item .marker.visible { opacity: 1; pointer-events: all; -webkit-transform: scale(1); -moz-transform: scale(1); -o-transform: scale(1); -ms-transform: scale(1); 
transform: scale(1) ; } +.config-item .marker.changed { color: #2ecc71; } +.config-item .marker.pending { color: #ffa200; } + + +.input-text, .input-select { padding: 8px 18px; border: 1px solid #CCC; -webkit-border-radius: 3px; -moz-border-radius: 3px; -o-border-radius: 3px; -ms-border-radius: 3px; border-radius: 3px ; font-size: 17px; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; } +.input-text:focus, .input-select:focus { border: 1px solid #3396ff; outline: none; } +.input-textarea { overflow-x: auto; overflow-y: hidden; white-space: pre; line-height: 22px; } + +.input-select { width: initial; font-size: 14px; padding-right: 10px; padding-left: 10px; } + +.value-right .input-text { text-align: right; width: 100px; } +.value-fullwidth .input-text { width: 100%; font-size: 14px; font-family: 'Segoe UI', Arial, 'Helvetica Neue'; } +.value-fullwidth { margin-top: 10px; } + +/* Checkbox */ +.checkbox-skin { background-color: #CCC; width: 50px; height: 24px; -webkit-border-radius: 15px; -moz-border-radius: 15px; -o-border-radius: 15px; -ms-border-radius: 15px; border-radius: 15px ; -webkit-transition: all 0.3s ease-in-out; -moz-transition: all 0.3s ease-in-out; -o-transition: all 0.3s ease-in-out; -ms-transition: all 0.3s ease-in-out; transition: all 0.3s ease-in-out ; display: inline-block; } +.checkbox-skin:before { + content: ""; position: relative; width: 20px; background-color: white; height: 20px; display: block; -webkit-border-radius: 100%; -moz-border-radius: 100%; -o-border-radius: 100%; -ms-border-radius: 100%; border-radius: 100% ; margin-top: 2px; margin-left: 2px; + -webkit-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -moz-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -o-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); -ms-transition: all 0.5s cubic-bezier(0.785, 0.135, 0.15, 0.86); transition: all 0.5s 
cubic-bezier(0.785, 0.135, 0.15, 0.86) ; +} +.checkbox { font-size: 14px; font-weight: normal; display: inline-block; cursor: pointer; margin-top: 5px; } +.checkbox .title { display: inline; line-height: 30px; vertical-align: 4px; margin-left: 11px } +.checkbox.checked .checkbox-skin:before { margin-left: 27px; } +.checkbox.checked .checkbox-skin { background-color: #2ECC71 } + +/* Bottom */ + +.bottom { + width: 100%; text-align: center; background-color: #ffffffde; padding: 25px; bottom: -120px; + -webkit-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -moz-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -o-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); -ms-transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1); transition: all 0.8s cubic-bezier(0.86, 0, 0.07, 1) ; position: fixed; -webkit-backface-visibility: hidden; -moz-backface-visibility: hidden; -o-backface-visibility: hidden; -ms-backface-visibility: hidden; backface-visibility: hidden ; -webkit-box-sizing: border-box; -moz-box-sizing: border-box; -o-box-sizing: border-box; -ms-box-sizing: border-box; box-sizing: border-box ; +} +.bottom-content { max-width: 750px; width: 100%; margin: 0px auto; } +.bottom .button { float: right; } +.bottom.visible { bottom: 0px; -webkit-box-shadow: 0px 0px 35px #dcdcdc; -moz-box-shadow: 0px 0px 35px #dcdcdc; -o-box-shadow: 0px 0px 35px #dcdcdc; -ms-box-shadow: 0px 0px 35px #dcdcdc; box-shadow: 0px 0px 35px #dcdcdc ; } +.bottom .title { padding: 10px 10px; color: #363636; float: left; text-transform: uppercase; letter-spacing: 1px; } +.bottom .title:before { content: "•"; display: inline-block; color: #2ecc71; font-size: 31px; vertical-align: -7px; margin-right: 8px; line-height: 25px; } +.bottom-restart .title:before { color: #ffa200; } + +.animate { -webkit-transition: all 0.3s ease-out !important; -moz-transition: all 0.3s ease-out !important; -o-transition: all 0.3s ease-out !important; -ms-transition: all 0.3s ease-out !important; transition: all 0.3s 
ease-out !important ; } +.animate-back { -webkit-transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important; -moz-transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important; -o-transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important; -ms-transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important; transition: all 1s cubic-bezier(0.175, 0.885, 0.32, 1.275) !important ; } +.animate-inout { -webkit-transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important; -moz-transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important; -o-transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important; -ms-transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important; transition: all 0.6s cubic-bezier(0.77, 0, 0.175, 1) !important ; } + + +/* ---- plugins/UiConfig/media/css/button.css ---- */ + + +/* Button */ +.button { + background-color: #FFDC00; color: black; padding: 10px 20px; display: inline-block; background-position: left center; + -webkit-border-radius: 2px; -moz-border-radius: 2px; -o-border-radius: 2px; -ms-border-radius: 2px; border-radius: 2px ; border-bottom: 2px solid #E8BE29; -webkit-transition: all 0.5s ease-out; -moz-transition: all 0.5s ease-out; -o-transition: all 0.5s ease-out; -ms-transition: all 0.5s ease-out; transition: all 0.5s ease-out ; text-decoration: none; +} +.button:hover { border-color: white; border-bottom: 2px solid #BD960C; -webkit-transition: none ; -moz-transition: none ; -o-transition: none ; -ms-transition: none ; transition: none ; background-color: #FDEB07 } +.button:active { position: relative; top: 1px } +.button.loading { + color: rgba(0,0,0,0); background: #999 url(../img/loading.gif) no-repeat center center; + -webkit-transition: all 0.5s ease-out ; -moz-transition: all 0.5s ease-out ; -o-transition: all 0.5s ease-out ; -ms-transition: all 0.5s ease-out ; transition: all 0.5s ease-out ; pointer-events: none; border-bottom: 2px solid #666 +} +.button.disabled { color: #DDD; 
background-color: #999; pointer-events: none; border-bottom: 2px solid #666 } + + +/* ---- plugins/UiConfig/media/css/fonts.css ---- */ + + +/* Base64 encoder: http://www.motobit.com/util/base64-decoder-encoder.asp */ +/* Generated by Font Squirrel (http://www.fontsquirrel.com) on January 21, 2015 */ + + +@font-face { + font-family: 'Roboto'; + font-style: normal; + font-weight: 400; + src: + local('Roboto'), + url(data:application/x-font-woff;charset=utf-8;base64,d09GRgABAAAAAGfcABIAAAAAx5wAAQABAAAAAAAAAAAAAAAAAAAAAAAAAABHREVGAAABlAAAAEcAAABYB30Hd0dQT1MAAAHcAAAH8AAAFLywggk9R1NVQgAACcwAAACmAAABFMK7zVBPUy8yAAAKdAAAAFYAAABgoKexpmNtYXAAAArMAAADZAAABnjIFMucY3Z0IAAADjAAAABMAAAATCRBBuVmcGdtAAAOfAAAATsAAAG8Z/Rcq2dhc3AAAA+4AAAADAAAAAwACAATZ2x5ZgAAD8QAAE7fAACZfgdaOmpoZG14AABepAAAAJoAAAGo8AnZfGhlYWQAAF9AAAAANgAAADb4RqsOaGhlYQAAX3gAAAAgAAAAJAq6BzxobXR4AABfmAAAA4cAAAZwzpCM0GxvY2EAAGMgAAADKQAAAzowggjbbWF4cAAAZkwAAAAgAAAAIAPMAvluYW1lAABmbAAAAJkAAAEQEG8sqXBvc3QAAGcIAAAAEwAAACD/bQBkcHJlcAAAZxwAAAC9AAAA23Sgj+x4AQXBsQFBMQAFwHvRZg0bgEpnDXukA4AWYBvqv9O/E1RAUQ3NxcJSNM3A2lpsbcXBQZydxdVdPH3Fz1/RZSyZ5Ss9lqEL+AB4AWSOA4ydQRgAZ7a2bdu2bdu2bduI07hubF2s2gxqxbX+p7anzO5nIZCfkawkZ8/eA0dSfsa65QupPWf5rAU0Xzht5WI6kxMgihAy2GawQwY7BzkXzFq+mPLZJSAkO0NyVuEchXPXzjMfTU3eEJqGpv4IV0LrMD70DITBYWTcyh0Wh6LhdEgLR8O5UD3+U0wNP+I0/cv4OIvjvRlpHZ+SYvx/0uKd2YlP+t+TJHnBuWz/XPKmJP97x2f4U5MsTpC8+Efi6iSn46Qi58KVhP73kQ3kpgAlqEUd6lKP+jShKS1oSVva04FOdKYf/RnIMIYzgtGMZxLnucAlLnON69zkNne4yz3u84CHPOIxT3jKM17wkle85g0f+cwXvvKN3/whEjWYx7zms4CFLGIxS1jKMpazvBWsaCUrW8WqVrO6DW1vRzvb1e72so/97O8ABzrIwQ5xqMMd6WinOcNZrnCVq13jWte70e3udLd73edBD3nEox7zuCc8iZSIqiKjo9cExlKYbdEZclKIknQjRik9xkmSNHEc/9fY01Nr27Zt27Zt294HZ9u2bWttjGc1OHXc70Wt+tQb9fl2dkZmRuTUdBL5ExrDewn1Mq6YsX+YYkWOU23sksZYFqe7WqaGWapYtXfEp90vh3pH2dlViVSvy7kkRSnM9lH5BXZ8pBn+l7XcKrOvhzbaTm2xe8RZOy1uwak2imNvGn0TyD9qT5MvZ+9pMD2HUfsWy2QlhntyQyXYV+KW3CWVU/s0mJEba4Y9SZcv6HI3Xd6hy9t6yr6jYlfOOSpMVSlSVdVcC51jIVX5Df2ffCT5OLIN1FCt1JVZY9vnjME4TKBDgprStxk9W6ig0lXQmSfXWcC4CGv5vh4bsZn5LuzBf9g7VD4rKBc
VbKBq+vPUmEod7Ig6WZo6owu6oR8GYIilaqglawT+w/xm3EruMWo8iW+p8x2+xw/4ET9hHzKom4ksnMN5XMBFXKJONnKQizz4YZbmCA5CEGqpThjCEYFIS3aiEG0DnRg74sQyxjHGMyYw+jjjIj8KojCKojhKojTKojwqojKqorE/z+nO2BO9MUb5nXGYgMn0nYrpmInZmIuF3GMLdtB7J713830v/mvJctXYflBTO6Vmlq4Wdljpdpj/4g/OOEzAPEt3FpBbhLV8X4+N2Mx8F/bgP5yLp9LTVMqgytdU+ZoqTzvjMAELmC/CZuzCHvyHffGqaZlqgmSkIBVpluk0xiRMwTTMwCzMYb20IuRTLDpZsjqjC7phAP6Dm/EI64/icTyBS+SykYNc5PEOfHCRHwVRGEVRHCVRGmVRHhVRGVU56yi/wiSFq6y261m9r1/kMOulwRqmUfQtyt3S1Rld0A0D8B/cjEvIRg5ykccb9cFFfhREYRRFcZREaZRFeVREZVTlbLT68emHkREchKA7eqI3a2Hy2Xq5eAxPgndPvgmSkYJUpLG/MSZhCqZhBmZhDuuuuqu0eqE3+tlqDbLd8jOarXYEByHojp7ojcG22xmK4RiJ0ZwJCe/NrRSxN/pFFVdhyb60bMuyzXbJXrNVlq04e8TuVVBhp0VYsn0S5P6T3nhKrpKCrp9qP1gan7daSjD1/znsjDdmSMpvWQGrZAMyL3Nbwu5Qonx2j70vH+MzZCqKrD1nhe0/ds522Xbzkdlnx6+5e0pgd7x9bdaW2Vv2qf9pyeb4M+x7xj6WpHz6u0gEYRevq7vQjvtftzNXs5aNxvqbsNS/XcmmBmHfev8pgvEFlML3OHh1nfG4nRVhaVc+EwL+XnZek0m3k3Y341tKUpLttxNy5dq9ircaImsp9rnt432+ZB+y70rwVqlsGd7sB2wQWbwvwo56K6fpefU+3n7Fw8teH3ZehL2hGwrLvrGddvL6ftLfzb23f0E3FHazgguvny2+Mj8XsJ721786zgWE/Q8XFfh3uJB8lq6AsA3IuDLbF7Dq7Q8i6907+Ky4q7133XyzN34gr4t9aU9fsz5QwUWIGiiCR4rlceTjCZHLE6oKqqIwVVd9RauxWpLroE4qoi48xdWdp4T6qL9KaiBPWQ3lKafhGqny2srzB6PljBAAAEbh9+U6QJyybXPPWLJt27bdmK8SLpPtsd/zr/dcdaRzuX3weR9dvqmfrnUrfz1hoBxMsVIeNjioHk+81YkvvurBH3/1Ekig+ggmWP2EEaYBIojQIFFEaYgYYjRMHHEaIYEEjZJEisZII03LZJChFbLI0iqFFGqNYoq1Timl2qCccm1SSaW2qKZa29RSqx3qqdcujTRqj2aatU8rvTpgiCEdMcKIjhljTCdMMKlTplnRuZAJ87LVl/yp7D78f4KMZCjjr5kYyEKmMvuoDGWu19rpAlV6GACA8Lf19Xp/uf89XyA0hH1uM0wcJ5HGydnNxdVdTm80YAKznTm4GLGJrPgTxr9+h9F3+Bf8L47foQzSeKRSixbJMnkSverlDibRndmS3FmD9KnKIK9EbXrWI4U55Fmc0KJ7qDDvBUtLii3rOU3W6ZVuuFpDd39TO7dYekVhRi/sUvGPVHbSys0Y+ggXFJDmjbSPzVqlk8bV2V3Ogl4QocQUrEM9VnQOGMJ49FMU79z28lXnNcZgFbzF8Yf+6UVu4TnPf8vZIrdP7kzqZCd6CF4sqUIvzys9f/cam9eY9oKFOpUzW5/Vkip1L9bg7BC6O6agQJOKr2BysQi7vSdc5EV5eAFNizNiBAEYhb/3T+ykje1U08RsYtu2c5X4Nrv3Wo+a54eAErb4Qg+nH08UUUfe4vJCE21Lk1tN9K0tLzbhbmyuNTECySQCj81jx+M8j0X+w+31KU1Z7Hp4Pn9gIItuFocAwyEPkIdk0SD3p4wyWpjhCAGiCFGAIUz7OghSo4I8/ehXf/pH5KlcFWp
UE3nBr8/jPGIYi5GmJmjiGCsIMZcC7Q8igwAAeAE1xTcBwlAABuEvvYhI0cDGxJYxqHg2mNhZ6RawggOE0Ntf7iTpMlrJyDbZhKj9OjkLMWL/XNSPuX6BHoZxHMx43HJ3QrGJdaIjpNPspNOJn5pGDpMAAHgBhdIDsCRJFIXhcxpjm7U5tm3bCK5tKzS2bdu2bdszNbb5mHveZq1CeyO+/tu3u6oAhAN5dMugqYDQXERCAwF8hbqIojiAtOiMqViIRdiC3TiCW3iMRKZnRhZiEZZlB77Pz9mZXTiEwzmNS/mENpQ7VCW0O3Q+dNGjV8fr5T33YkwWk8t4Jr+pbhqaX8xMM98sNMvMerMpfyZrodEuo13TtGsxtmIPjuI2nsAyAzOxMIuyHDvyA34R7JrKJdoVG8rx9y54tb2u3jPvhclscpg82lXtz10zzGyzQLvWmY1Ju0D7yt5ACbsdb9ltADJJWkkpySUK2ASxNqtNZiOJrxPv2fHQJH6ScDphd8Lu64Out7oeujb62gR/pD/MH+oP8n/3v/PrAH56SeWH/dDlxSD+O+/IZzJU5v/LA/nX6PEr/N9cdP6e4ziBkziF0ziDbjiMa7iOG7iJW7iN7uiBO7iLe7iv7+6JXniIR3iMJ3iKZ+iNPkhAIixBMoS+6McwI4wyGZOjPw5xFAbgCAayMquwKquxOmtgEGuyFmuzDuuyHuuzAQZjCBuyERuzCZuyGZvrfw5jC7ZkK7ZmG7bFcIzg+/yAH/MTfsrPcBTHcBbPqauHXdmN7/I9fsiPOAYrORrrkQaa8FG4aSvBgJI2EBYjnSUiUwMHZJoslI9lUeCgLJYt8r1slV1yXHYHuskeOSLn5GjgsByT03JNzshZ6S7n5JLckctyRXqKLzflodwK9Jbb8lheyJNAH3kqryRBXssb6Ssx7jmG1cRAf7EA00sKyeDgkJoxMEoySSHJKYUdDFCLODiiFpWyUkrKORiolpcqUlmqOhikVpO6UlPqSX0Ag9UG0kwaSnNp4a54tpR27jHbSwcAw9WO8n7w2gfyYfD4I/lUPpbP5HMAR9UvpLN7zC4ORqpDHIxShzsYrU6VaQDGqEtkKYBx6pNAf4l1cFaNc/BcjRfr9oVySE6A76q5JDfAD9UqDiaoux1MVM87mKpedDAd8CAEOEitLXUADlC7Si+A3dVnov3sq76QGPffTGbJAmCOmkNyAZin5hEPwEI1v4MlajWpDmCp2tDBcvUXByvUGQ7HqDMdrFRny3wAq9QFDkerCx2sV5c52KCuEz2HjWqSTQA2A/kzOdj6B09lNjIAKgCdAIAAigB4ANQAZABOAFoAhwBgAFYANAI8ALwAxAAAABT+YAAUApsAIAMhAAsEOgAUBI0AEAWwABQGGAAVAaYAEQbAAA4AAAAAeAFdjgUOE0EUhmeoW0IUqc1UkZk0LsQqu8Wh3nm4W4wD4E7tLP9Gt9Eep4fAVvCR5+/LD6bOIzUwDucbcvn393hXdFKRmzc0uBLCfmyB39I4oMBPSI2IEn1E6v2RqZJYiMXZewvRF49u30O0HnivcX9BLQE2No89OzESbcr/Du8TndKI+phogFmQB3gSAAIflFpfNWLqvECkMTBDg1dWHm2L8lIKG7uBwc7KSyKN+G+Nnn/++HCoNqEQP6GRDAljg3YejBaLMKtKvFos8osq/c53/+YuZ/8X2n8XEKnbLn81CDqvqjLvF6qyKj2FZGmk1PmxsT2JkjTSCjVbI6NQ91xWOU3+SSzGZttmUXbXTbJPE7Nltcj+KeVR9eDik3uQ/a6Rh8gptD+5gl0xTp1Z+S2rR/YW6R+/xokBAAABAAIACAAC//8AD3gBjHoHeBPHFu45s0WSC15JlmWqLQtLdAOybEhPXqhphBvqvfSSZzqG0LvB2DTTYgyhpoFNAsumAgnYN/QW0et1ICHd6Y1ijd/MykZap3wvXzyjmS3zn39OnQUkGAogNJFUEEAGC8RAHIzXYhSr1dZ
ejVFUCPBW1luL3sYGQIUOvVWSVn8XafBQH30AbADKQ300kQB7UpNCnSnUmfVuV1TMr1pMaCZW71Si7KoT82vrNi6X1SVYEa0ouNCPLqFJ8AFyIIN+T/dgzE0iUIokGJTUO69KpuBMMvmulUwJ9if980h/ILC56jecrksQA2l/AS6aDaI5OFmKat7bdan+r300lAkD0LoNugWfkJ7RNiFeTvHgv7fG/vdo5qh27UZl4kui486bLR98sO/99wOBPNFG3DKAyDiqC6qQppEoQRchTTUFVEFRzQH2NsFt90m8QUejsbgE6/BWmkLX4fd5vAECkwHEswxtfUiCghDaGAYwpgatwgYKG4TlUKoH9digHpejYQwHP0NtmJaogVAjkyoG1IZ8r3gbHWBia+bwxWhFrRPgrS2gmhU1Xr8rIaCCoibqM404fhfD7va77C725xP4n8/h1v/cApslQXqrW0G3H9DSgVJs2L2gO5q7L+9+4ssON+52W74RzR3oLVxHh+O6fBy8GDfTgfxvMd2YT4cTNw4GQBhT1Vq0yuuhOQwPSW9hYllqBE5hgxQuI0mxcHotihoT4K3CW82O9wQiilY3PEpR1KQAbz281Zreu8KESvd4PR5/ekam3+dISHC40z3uFNkRnyCyQbxscrj97LIvPsHXNkPoPXft+Y/2b31x2973c7Mnz1qAbbY/e/y91XvO7l6Zm1OIk/8zy/fo6S2vnom/es1ZcXLp69PHDJ86ZPLGEcWn7Pv3W788tLhwFkiQVfWtlCMdhFioBx5Ih3YwJSSrwMQTamR1s4Gbycq1JyqgRqVpVrEaNp/TEsMjt6I2DLD9Zj+0ZuHphorW5t5I87t1jfSnaZmCm//KTGvdxp6e4Wub4GCCulM8fqcupd+f7mEMYHpGsn4lOfIC50byojNra86C17bOnVeyqHfXTr16ru5J7t+K8rattJLPdO7Zq0unPtSURQ5niUU5JdvzOs3funWx6elhg3t0eXr48O6Vp3OKty3ulFO8dbH8zLAhPbo+M3TIc788JmY/BgIMq6oQf5EOQCPwgg8W/IUeNGCDBjWKn8gGiVwpUhpwpdCaWRrwTkhpxjulWQrvrKFJe+iWuqEuwVqXE9FA0ZLwHk+uJKuuWoy8sJpwojK5mnC6uFqYMIMphcnp9sqMusZS20w0ca0R4p2ZGRkhooa98Nqgxw5sKzzQZ+xIfPzxrdMD5YO6Hn7+PKV4cdU0usG1dW3KpEmPtx36ZPeBuDBLfWHS8k6vf7BzQe8Xuz9DZ87bVLXt9oTHOnz6xDgsTpw+b9Iy4fOBy//VutdD/6fPWEB4XnRBUPc5SsjjSNUeh4HlPibomIsvSivocvwEEBbQZuRFeSRYwQJqnTRV1DffZst0ykQwKfYEp8njJQum/jjXs3KvBZf2eMGzYGoFeeZT3IzPdZw2jqbTz3rQWfRmycDxXXfgcwAIHvbOzFrvxHhCTN4Mm92fTog3M8FmI5kv/DTfu24v6b1hsHf+D5NJh0/o8/T1LuMn4U+YlnwGs7BRt/FdaAkdCggNyCChh6RCHUgO7bvIdlfU9z1QlwWSRNXCektaIlsqNVNi7jnVKdlNguDFrvRMK2xlWRuFTVvRk4dm7Hl7pnCx75px2Ju+Mqbo3/Sn/phMv/w3R/40rBTTxXchGuoBe5kKuvuQMWxfurtzuKxuK3N2Vh/ZiIV0xB46Agv3CLE7aTqe2InFgNCQlmM6XAUzOPmbNPFeEOEvBc6yV3ct8XJuVn/xnSG0vHPO4q0rhh3jOFJJEokl74LAOGQ7p2GkY2ILk1iaiF+RpDWAsJzFsUlwmnFdP8SMiTFj0p2hFH4qk0crBw9Xy9tn339/dvtBrR95pHWrhx4CBFtVjqDokdAODFpkKGRPOt3o27WJDNw4U24JQGACs8IoZoWxbL32oRWj2M1R7Oaws+I2GKVoVjR4pkgpFOJOIYJfsfna2uxe3S5MVt2dZIpR5RVfXxfLv/u2XNg
9v2DZPJK/OH+BQEbTvfQA+tH3Bz6K7ehZeij224sXyumlihvnbgJCCQC5LL0Hcg0uiUGR/pxsgMQNQkzThLB1E4FPspzCbZX8qT5yeQ9dTGwNxdP52w4DIPQDEH1Maic8BcaAa3i3MyLSBDRBcfKVFEWzhOcVHps0h1MJrefyY41fYDGmse5GEF2ir7Ij3hrXY9GERWt3o3D5eAVLa6aRqwtI69mbemSv3LDk6K3zuy7Si7QPIPSvqhBuM3SemogRywDF1qCrywZ1OTqI1f0apGkfA/bTNgGO19L4rwGA2WqsQdNj9cwNFM0TJsnuAf58XUVtEGCtlhS5oT4mhhKSosYZ8kgpJjcORUkupNeNuYtzCqumFOwOfnTqm+kjpuRUAR1Oq/YUzspdtn7VYqEtyc1GyB//5udX/jtAa+FRZx/4ovzdCYuW5MzOI0DADyB2Y7oaBXWgizEChN0ClxUtIseKzAGGhWJZDvIsRzPL0XpCqd/EwTvcukmjD11Wk5B77NieYBZZcjA4Fw8m4Ndr6A7sPlr4qbI9OdYEENYxG2jJUDSEQSEMyJZFhiFMPrcAVDQxzJ4pFjkiU5pWLzwpmeqxSc62NcB3ID4M1sSjN/MTduZvBEapzRFPWDT2+hKq2XSnmEynupJvgm+1GJl3+JtfrpT9at1pXT5p7qpN86d2aEOukAvb6YSH6e3rN2jwwoczZ6svrdzlbwIE5jP8DaRdEA8u5vPCKlxbAr7/GCkBVEvgiFQUrUGkHjjcsmi6Bxf8fgVSBWbcjholEJ5JuVQF8RMO7/vst1OnaSX2wn+dGbA56eWpMwtWSLs2iLduzKe/nrtBf8ZHg51wJRZLwXHZPR9/+9r7LxbuBmQWCGIqY1+GtkY7D28Fxy4pkQYO1QaO6OYeVEwNvvZf0qeyQrgkdb7zvpRYBCDAOMZLHd3KXdC8Zm8d7IUO9vawsnH98locnAsvsyUv9ovcUqGel+tWnFffWUukmagORUuJJCtkJKEsKyKTEHimpfOFes7ZNoPRVjFhcPaCqsCZ4NzsQeMqykq/W/PSnTWrcuatpt+MXrigfMEiMX10Ses2H0z+8PqNDybta9O6ZNT7ly5Vbpm2rujWsgKx3sKJY/Pzy5cAEBhaVSXc0uVsDL0hXO7USGlnAzuXUrBzO+FpBAj6L7tBRQ1OXY2u5RF4BqRLxLXB6lBAcvuZl0hlLt5fk00LD923ZeCsvcPHnsi7dJuq9M3G3s9/p9/329B449RpqwvInA7PzbiRt/KbGfRD+nUG7UWnSuvFL+9kP9f13Zt7175YBlVVkMsi4GjxcfCA7XdAE4tnfwgTQInwhIk8kLE7m7Ko3IPd6WX3fCJMQBmUGAAlIsvW7wSEzvCRME3sCjIkROgYu8r8up5LoeRAPzrQTLIrTzG3NT94AKevxGkHOL9FWCBcET4GAUyQCsxgWOKgkxhp3ZpYK6rzlEK4UrlPeIz/Ca22BEs3AyDkwgHhmvhEGIsenDkWKaBKHIuOxC/UD44UelaWkEUo7KO5K+mCUiDwRNVvwiS214nggmf/InYls0Ey3+v6UthY6itchUUF/jZ+QSh+seCVmXkvfmWEPL+Jpbzh8ngYaftUznNjsobP2E0+e/fDsy+P7lJWXS2vm7zouYUDRmdNHvXvlw8f37WzZNSzRfSj6vIZCIyg98sXpDXgh8fg/4LaNpSbmBlis14BBbS4tmYOMS5Nk8xx/JdZ0dqTsL0F1LaKVj88wUrWZgG1WZrmDs/FKdojJFJvmd/y6sqbmWHjEjkFmeclNnCliMQk20Q+cuoJPrHbbCxoizaU9dwl086ZkI/FXHpnrz9jcddlK+1xU/dnPTunW7p91fglsp3uptpReuTt6Jjl6D3d950HUh86mXWHFr0VE1OOM364jUN33P25zrO9HxjbGFu1e+SFtfj7z/SrbT3+9dXJ11BY3fzh4IUvr7+NC7DoMM37/RZdVdbCPcHb9gZuxfpox/d+uE7
70uXLioYPsOAfDb/nLDYAkBpKKpggCjrWzp5rHxfIbCBzdbCIRPdfkVqrRemToZIffehmvXAyuDH/EGmxjbQ8GHwKf7iFM+h8dujSjdQjxSBAMYCYp2fuCZAEPQzxsnb2BHqEdKZpceElzXE8ieKRSAkrIRpdjc/qCmccshvZkCUjrlRXKE66ivHadz9MHDopn35FD+ODuS/RT2kppsxas6SA3pTUA6XDNzR37Z5z4DopDv66eBqa1s0aNWU0AMJkFhEuSQcYhx2MftKY67ITkrgAd4A2g3OsGzliSRNXLtGdDFZ/OtcacLo9TF0Iq6ZteuJ7qT698T2l9OgKjNr5FSY6y+puLXz/9CFt8/YGeOrLu5iNGUuOY/prNPj5jvX0x7tLv6NfrXgbiM7yIcZyNDig/T9wzJmLCaNirMbW4lG0OVnkFk2ClXltVtoTbzG+tA8bb8JN9PKBs8fK//j6gqRuo8eO9jtFj71OJNvdxRhf1eMW2gkA6kg66kiehrBG/Sk/ixZlvq3RBqcoKoZsTdHMBhdpdTmq/4TrwXzyv8ohwqpgSzKZbAlWbpDUjbRF9fppbH0LPPIPuq5ZiBhW74j1ZeOK7ur1TgQ3lAq5wfvIEJITnMnXqgMI05h2XGPakQSD/7+04+/qIa1RKLo2Sns7rlFSI9Lv7YcbPcM6rWEEmlRZ5A7H61eA7ZLTTVwpRKjWHB46xGtd6R+qRivWEPRhwk1MSCrNoOVlh/H6/lEv++lOouwfkbUV04/Pxi444usL6KI/0arJv9FPWrfHTutD3Elmfe96GPfOUOYZFMqwqyrwqoGTusmC2VqaBftFbKheXXFKfaz1SeayYEppKSkvY9s3QFKDy0g215/3WDNZr0Yb/sORsf4uH04uLZVU/pSfVUAn2M84aGXMZ8PBm+Nj4KRIA+CpvzWUfvlCxacQXXb39OWfS/PnTV6Fknr39umK8iMzlxQuhGp+JJ2ficbMM1x411Y041kyEJ6FPmLtCn1hBEyDRbAOSmAPmPtp7YGRJUuEX7dnyB3lnvJweZKcKxfKr8vvypZ+DKtJJw99iG5SX2PkLfwq+BEZ8QV5bTeNZxS2JoHgzMqz1VbQgCGVoMk/WQFE6hfXdB+OIFrl0rINzJ6qJZa76967j5FXw9YYlMAQo8Mn1Xw5BFE/4A91URCqvizEx+SyoxvtrMcteA2v3S610ZRV1G0vZXvwH/FVFk4yydC7w8Si4KbgUY4trK0WeFLDKG5Axk0JA6mtPQbz1IgEOiq944qFnGYMqai7rIx8sl8cfHcjA7JWfB4ITKqqkCzM6q2QBO2N9baRiFglslASaxVK8aTantNDGYTDq5+JmHSTtmVKluX0lvoG/X0VWYnRb+zE6OX7A3vfPS2c3b3nhECKL9CybcXY/lTWGXxsezHdf56ggA767e8j79IbGBeE6qhQqlfLdnhKi4rXS5YonsBBmILahZMWLeCfXbMQjm0cPaeIeSFW37uro6zXhVmlpO4PGEf/+IMWY591r75aQNeT+4IsLv169NznG1bkz1svAIHRVVGSzPhzQApDZXY3DuVtat1qVFYGxGrYP45KMFv5fVZDVGXZXrKRU5NkSpX/jtdkRivmTkUxh57s3O0etyrjtvTkvndOC6dxIuf2LP2454mpv9ru8VtCy84j+8/J+b1Dr1fzuw1APKpbhxMGaVKifrwi8S8k/2B0hgpbU0JplmJIs6J1y+Aak2AMR9WkyyZ0uLGGd7KflpThp7+jZVUO9jwVHIPeguItRfQKeSr4lqRev5B3rG2wMIZ8s3rGwuUIgNCNxa1sfl7EUIO3CVvL4O6NH45UmR+ZsFarE0boqaeHb4+hHKzHP6ew1ljj8hKQbcSfvqFw7a9xu+ke0vOPG2i/Vvjt3LJta5dtWoMjTw6hFV8WUuaMPnql6OVCkt/p46I3bkw8MXX+mplj+0wfPv3VsbvOTzgye/7aGRde4FK1ARDX6HluK6M4RvplxRD
yA9XE8gi6hrbYT1uKwyXbne8l20ZAWMKYKmHvtMEDmmSPZzIb3aDhBMoQa7Q6BnORwWRKAS9z36FzEKtYgrTqmu8HepPs27HllTcltTLlFL2jECSfCtcrPRt37tgoXAVAnr+LQf28o50GJl7vGBM8g9MzujZAQfdpqXqy7iPs69qZ4M2S4Oenq8Rdd7qF/OiDAPJ3uox9DG7B6EANphnOB2oUOo4N4nQfL0RxbyqHuli9YwQ4M9HHGjvH4TVxMPhZg6aY/DLWbZL0aRndtJOeczrp0Z10cykeL31TuFVpVg8IN+90E1PHjr17leFDaA8gntLj70gjBWE8tZ2w8UgcUOTx1ZILhfA6vAsiC7nVU/nyWrlY3i2zKQFkjt0iQwi7HnD1/31kPvb7lKbjxZt0HS36DC9R3w1hHmkVbBVMIe2CR0g5OcM5jWNI9zKkZmhjRBrGY0AaBhdajwdCHxmGM67QqFIadY2cJ1crxwZvkCRhBX9/TwBxmh77Hoe/Tz4ifYoI3NHwcwcpPGmRTGwyFPv9/AzCge2FR+9eExpV/iD8sWHDcnHexqV8vZX0CImW54AJUoAhVk2182YhUttZ+ORZM4nev58uxKnSV7enFJne5+9pwr41tKv51kDSIm2JPci1o4lKBqqSeptnMRZ6BHP0VVP1uzFNJZH4VTQm7HZ+hsKSCQtOo7llZfKcW52L5Dy+7iPkshCv25DXYENhVQ9oaOLGwheRuFOornBL9r2BzWdjs+3iXtqIXAw2BQSxKksoAgAB6ke8pnZCJfHznKLKUcLqNWuAa694Ca9IFARwg4q8yMV+9z5foRI6WXo7jiQRwpM9vvyVTZR+wh7zgB43K4RvxKehETSBqZqzaTO9WFbU5Opo42QgnIm19d9QYROnnnlF845HePZ4ZK1ti3ZWx50kw7GeOzKH93h5vsx9uu/edwv94MdpjXc69NM9dzI/2muiRM19a/NJxK/fnjh+SO6eCQcn7T0nemh0r/XuFfSNicndc99ZXLy3x6AJQzs9u6b33ldpnRd7K0v7di4/3GswEN33JssAdaAuDNVs9epzbDZFFQLAvFI4s0w0er1a5xiSWdCTzRjeqTG1S3SnMX1gJz8mnmNnJNusXi6dycrdtZh8s/TkOEvJ7nG46Mbulfnvdevx9oLVxHqLnl0xU4bgR4vpBRqUPjxVQluUnAKE/7C9qmB71RC6aEqjJLZ0xNFbYu3cBiIzGiYfP2SLZ60RHqfWV4dBBKu/mnG3R98AxjZ5aMhq805p0sEx/6N3J15e/e5P5p3mgqylL63LmdK337ah6EVI2vh73pUdWQuPl7r3HuMaNYCh/FEGiIN6jOHE+g04RYkhhuU0w6moIZE3opeEGJ1hveMM2//2s589neW2TsavmysRCf0DgkwrF2JAxf59Y3eXWMYe+uC73UW56rP/eiOviHhuY9o8kn4HJuZh+i3T+4GN+NPaMxx7P4b9F8awg3GcpZl1jjl7LPcKw0usbQD1zMDvq5f29v56H9cj/WodhigRH7tCd5qNOZiUAv57J9quhITQSSCmyCaX3+MhT12jFdP/N/fsN0G3+NaiwXm+8Xn08rgiG2lkzotH188pW4IF9BsafGrzwW6P9T4tHHtlVZ2lLwHCAwDkmOxg0gzR4hK4FUZI0ShSwRMjQ3Ft+TjfaEiPYyOdpWoPML3i5zzsJF7/1OA0hRSIfwD7cvv2PSWPPByV5u87+Msvhe0FY3fssxZasgZnF1T2AAIDaU/hZ8Z4XWgMOVpKqofzk8KTQzDAC9tfYmT9a+ODGjcV0hsup/b/uHsP8CiO5H24umdmV1mbFwSKC1qSESjawiByjiYbBJIJJgsRDrCQwRiTBAibIJJE8JGxEWPSioyJ4mxEOM5gnI/D2RecpW193T0rNL3Ahef7PekvPTubd7t7qqqr3nqrNtzJQjcRHlHt/DlmniIFYYp7RJjSfAG8O03jojC5SqsVq6yvz17MCdzz242
Zn7bKmrV/cVHOmVPflK1bfOC5gXsXU/nyoqbLZ1d+euOfowfnrF6/LHM+SvzX0etb0Peb+D6+HED6xABgpnocZLHy82JKEFB4wevjd8LonbDacJ/tWUF6M5OaFMMiXa67PKRHnfIuoMGSB43PeX5JvMcjHS0i+d4U/KeZU7N6VzE2Bwa2DY9TznO+WhvVEBpGP5m55kjPrHtEHnANScigCDCMjr420OO5rOHxcjqKfqpNm+effRZw9WnSAw2l3xcCDmbDnHV4mMK4ffAE00tPsA6wo4aAwe/2BNWk6B1hU2ycO0VzgSUmgdogepD7rZNjktu0s6alpNKxpMrpld3IZcuagA795eMoulkGHxYgtg5yiAHouGbqgiymIqLWPxmDCeAYiz0d/FGYcgii/qDv6UchmIuGoFoQJk1zCstmeDyjUL/PyDB0+w76aQ5ZaICqkbPQaPKsdxkg2AyABhrAD82Keiyaxc6EAdgcCwAMs/nuMUuVuWUTNewJBk5Qt5p52+gdW82devROPe6lB/AEuMKvSgMEcL0O836czDik+iRVo2ewG644doXSlVnlXzyX+tYf0GiDZ0L+i0uCyx4c6eCR02cvf7t3FlnsbYrLZ0zPG+dNxBe+3VT1tZxeo0t0VmborwZbrOKsxIkIm/ijEQZzz5k1CNZrldNfrVArw9zLOrWS05ds1qsVHRRgGEa9jGQ6qnCoBx3UkPqRPg6rVR/D+2+AqlVwfuuKjDC6dMAYctQUQQ1Hji/hsPxPCj9C5jmfvXGP/FC2a/mKnXuWL92N3VvIMvI+CS2pXI4SqwIP3f3okvrRXeYBkSw5io8tAqaoVm1/tjL8RtBBXRQqrJzFPxxUQkRf6DE7tegLMVFnkiA6Q1Gfn72Q69kTmHvl3S88m5fsHtB/32vF2PwLuZHv/UW5O3s5uUt+l4/eWuutXHOT+xkkS/rBN4+Jop/xH3YOLuQWYfX9PY7/6G6kMXjxEXfj6wtncgKoQ1d2/itP8Ws7Bg/ZvqgEx1ejxq9M/j0ey7NRy6qAsltvYEvhnzXZxUV0BqHQWZXDWKZRB/gLg/XbEbj/jHURV7CPh8CX07e8TlzUpOWRdp5D0rBdqfWlNcZNXpDT818PA8R9tONyb47VBGpYjXC6BeKjKtWvIcCGUhxeUGtJQCPrm0pjK+hRbSCSXhvUcBD8Ga88l69xTyScSx7s6PPZgWP3y155Ycy0Cci+v/+XngWXcz1KwbTx81B0j/7PDpjR97Vjp9b0nDKkS4eObQbNGfz6geE7sjInD2RxXfW3eJDSFuwwUg1zOEVEo46ehFDnUU6NRqBjoZ8ksFAC9FNldBoLs2Nm5tnw027nYQvzfMxocXl5aruYp7t1mvvyhQtKW/J7oTe7XbuQdbZ1y/CWQmQABEvout+jJsJErRXFMESMTBiWuN3oCdka6Qo/xgdoyAbD0SAmkFRApUaTrr91GHku3+rsKZ0478oFfMbb6ecSyVp5EQBBLIBUJqc/HgMSRK7OIxiQImBAlF0ZcpLMXUFmn6yUMiovMiuIoCmAcpPeDIEsVQkN8/98Ub5FyX9y6AXBEt9ktKugYN84OAbEhmK1JsndKzzkwjryWzWsIxeP/blqbbXUqvKilFz1Jzm96rbUBBA0BpDK6diCob8wKB3qU+ffoz5BMoek+NUj6I6VbeSSxNAd9MvfPyAlaPLt33//C5pMSm7jA6jA+5X3I7SWTMQu7AQEDtJDKqWjCadeEZjM/iul8wCF08KcIwhjuq8nUwDTU20M2OV2pzgZhYCO4/uqi6TXmHuuTokjxsc1Ji+Xo3CpaWU0+acUuk7uOWaK3BwQDAGQ3qEjETGgOv8HGFA6nlO1Aw/0HpKSi4qWSHU3vMoxFPIGLjG0hjrQUrXWjeAzD02guqgjhkUbWRZLqo2iDPzDOQqckuxKSUxJSWURk5myRCiL3OLEsw++c+sWPvBO/PVdu6T3yRuJ909c+tfr/6w4+lnS9A7kb+V
fDH3+/vvku/ZsBAcoJ6zjE5mqiPlQHdeuJf80nGKvttLxTvONV9HGyyCPOpQxH8y9WTMdr5mO11I7XsVi5uN1plKmchods4nGFQ6aEU+yx7Et3Wi9ajx8+Hr8QRXdunX4QGU7FHTvwYDnvrqKIjpMT/zMc+OH1/9VfuLzRPb9r6I35B+kOHBCe9XMcwNQ68g4OOZUGs4DfVuC3paF+9uyYCYizAI3x8wiG7l9djipsKTIPxxf2nX+nu5Neg/Ydqyg5/LStpE9R0qBJXdS1jSYOAJvfb/ttiA8YyRgKCDr0Vi5F48fEnXxA1QwaE1QaaHkBTNtYdCc1WVlrjqLG/bufljxgvdXfqv09EUNiNYwBFMmajzEwnMqxLnYnGu90Dr+wLGxQg99BHHow8ZsNzvWYUe1nj8AYtBqLzAVJwuvzRBQkO6jKQpiuLjK887l8oOedWcMGgiy6dU5Q1++EvHV13Go/j3XLRQZ+/knzlvraqAQBMMAZBZdxcJctb7/uB+B9qNtPK6LTlBHRtM8d2E0ylVPR6NM/WwE+iGr9gmo0NS9NJrRAR4/Q+S0GWONsYwml5bipluVJOzFlAqKzga0wR+hyl97NUrEATu2Bv50+dTHp+fljF8QiDLwlHsbhxUXB76aFfBRMZIvfX/r4MS5G/NJVTEApufmvjJM/gfUgyaQoeKmzbR9qdRdAeL+ZapgMS4WUECKRbn99i+30Z0WT7XEncZ9mDSnkXG/nEZkczgSOamZc6HkPluuX9uyaEHBuKmrF6wueff8lrULi6aMLVxYlTX9/Ofnc3MvTM09P33qwgVLFq/YXP7+m0VL1s2es37pxjevnt+yagnOy7v1Ut7NvJduzpl9i2lVNIBMkyXgqMkBOOiwHUISs76/vxhulZqqEOKgEz4Ubo224sxSKxM2elQtWEcPZvpoZEc1DNfKZQXH5Bnv317D/ef/KAmPRZM+JCPQ02Q+mk/mnyWLGPKMniEj7klheLu3Rf6OueQUaj93Rz6uYOdgNbVgvbgFM0IdZsOERJWqIKkp1TXqEDDXcHVZWRk1+c6qr6TL+GfA8Dwxy3OolCZDR5ivujp1phNiVT4ptYgoLw9iH+UI4NU8DpOaoaO5OzJ8MFkYFUgBcWnh4ky6FiY1rfbByLQW/CuYkPAqIiFC0AjezJGJT0l7yPFujqlM+JJ+cq0X6ZCjcEOKHWu3nVw+5DllnbqSqr9OvdK5oOzQ5iU7V14/cibzSPsuKPjjL5Hs2V2wctvTi1H0ntx072fP9+jbI/U1VL9Z7wEF6MDJgS2XjN596elnct/DC4pmZg0d36ZFzqacsiH04Z2XP38vf9P0Fzr1bde3a/Yr++rUs47p1Llv++fMtjGdhkxm52Gs/Hf8g3IBKMgHkYyhqauWYNlOo0nTAh7PaRhFw5obY33sxbe1a2UYJSxS69fUZwRBgmG0kutvynmuac/AWtWd3oqThZnMsWOqT+Oa05PVvEZaU+mdVO7DpzbXSLeHwqVoCWeqQc1TeeI+4RAEmYLoA2FBEi9ewkLg8/CeWo9n3UpTaXa8tuyrOdVgWX/6uD8sOvs+knZDm4Xy9i2U/NXAxSiPNJMeQxPpPsaCPPKtkuKTpzdt3f/GyGEjJk0aMTzTi7YiK2qLLFtLyHfbtpJvt0w/jnqg+aj78UPk8MUL5PARPHDDtptHppTe/OPaUQOX5eXOXjZgzML95MOdO1HD/XtR3K4d5N7ecvT8pUtkZ/kFsvv6NTSEawx+Rwrna9kQJqlh8W42szDGjRfp2aocb9fqOlguB8t2nujgV2zXt1OVrt3mzcHscU7JkPSJjhj9AtUkOlJZooOtjltbK5rm0LIcTJbxhBBDz/mzFuzaP2lupz7b9i99bWME+WPTIfWn9h+Kz8bFD5r7Ys7s5MWpSSEvLihcRM5n98trVG8lykgaQfnIY6FIGi29A/FQ+jsBI5SijtUEEMxDs6RTUgwoEMGzbaiCGjaRHcfcHU4YPlX
mzZMy0CwUsA1keJ5K3n26WmEQBcnQGvaoqW24yqcyN4IdrfzoEhkgfhCZVagorFdbLBjDfXjKGVbjNMZaHJXJOFMclcmUmDhfHeHpFJR5CFJMKfTR6FqhbBSdwt9rKk2oKE1IYAWXrbEuVheFLM3GaLa1Mqgws8vJxcwbc9pd8cnueLc7SSuecT3vL27TqUBu3YZsxcXkWy6Q6MwKZNuwZ/5LyPx6mGSaXrq565Deo5fhO34yd4nJ5B4Ut38fimUy+RN5W+r3an5eu8SNrQfFmxp4zFnyfNw+tVtrAASzlVipPbfnZuDFJpLI6Zbae1NxuRJbCBgWSGfwXHpugsEBCeLys3LVkAQ1EAt8G2F1uOhxnXXWwEk2x4K1E8atXj1u/Lrq1O7dU9N69JDPjNu8afyEdescXZ5J79FnUnfAkA0g/ST/C4IhHDqzajQxog40Pa7OrTRU4HsoYQa2eQYr9RScKdbA8YK0pWgSWbOLzEOv7ELtqk5KHaRBReQFVFKEiitD17OVao834X3KcXDAADWAo8lQGyoJBC0b272wUEgV5tC0Xg2ofTyMV/LYHMyR5YuNauuoWImqLRzH4n3ePajZ5LbP9uhSvAsFbJw4oBQV4k2TUMTYTi1b93xm2pp5U8ZN7PM6IGiDC/FGpQziYaka424kjk8opWLjg7phWinVkRyYB4UgZaoZgHKPhEM0JICklVSxARtxLXk6rK6PyRxfq1E2XlOlRmqfV5eaID0VXdtSxaoqnxQ8rKpyu1DggO5dMzo/06P4zblLN3duv3bvkoU7S/p06Nxt8xB5TOsWT6UnNX4hb864tGF1GxdOyH954lPPPpuUy9m6efIHuH5NThrTnDRGmRrAcohNBWcyB1GiOWqJl1ayyP3ZT8mPaxVC7rL3b6TI3vdyOligrxoq8GN0MK4Ql3JgxOJPg5J15CdjqHZGzQ6O1mnJQo5Fov7oxRmX2pTtCszcu7ofBXS9i9/cvF6Kqbw4fXE30lS5Cwg6AEhtOeetqYqDQ8RM2iOUcwQBGunPTI0Oc1lizXjRgL+RX1DQ31AoDiC3/1z9e18209V4IpojdYNAcKiSj22IEw4G0HF/UO8eV9GaEsvVWoklvsNqLBMyqGDADNIL7QWWy26nKuEmcZ1MfqDtIavBZaDGE3GI4qDR9xWlSEMLYjURcGvuVhqKDNmwtdDYZ3DbF2KS672RnTsxOaFZk8BFjJ+Mt6MfeEVkWxUx1OiJhZE2sTAS+xdGst3GSAsj0Q/FH6BRFrwdD31m/kwATL9Dldw8TxRBv0XSsF2JuU+iiVOD6kmaF6OaJCEDL/mZucdWlxtfOrFx04nj5E+n3swe0H9kdv9+WVgeVfLu2Z3dt5w7t8Mwetr0Mb1HTZuSDXxfXS/Nlg5DPBwMBTDCQTQB2OMDAZTXlbfADReqP8Tr6bWK6kAAMsJlfBsATOLy8JqhvgDKFf4eFb6FAP7e23g9MsJFKYq/R+CA8ffkACjfKcf55xfx91yWGCRghEvQEm+qeU8sfU8sfw9g6EjmSbNpfF4H4mCwGqixIgNZ1QDLONa+nsXnYIrlSNZ/qs8pjaW7tz77FiYZjdqqJhk054ZV7/C4PoWJL+6JGmcdC8YzJo/O9+DPjp6/vXVye1+1Dt49Yd4fzo5qOHl67rBtf7ryzlsHcnu/gVpTr/epZjxj+E8A42DOwbbALJGB92TKuGo2gIbFPJH6rwaDr1ZAyNYL+5PFAL56WilWcrHtycovKFYyDq5aEe7903ufS1Olo95eNtzbe8yBz/5+AF2ORtlki1K6njQu8n6HZuOPAMFQeF/6SB4FwfA0r58PDJF8hQJBgdzrlqVAdoWCZJ+kKxWqUQ7iL9KwGitCaQg5ETIiNBR1J8dmoW6o2yxyDHWfRQ6Tw/ReX9QnjxzkB1Kah/qRAwASZRa/SSt1vgUnxEBjGKvKTZpyjWTeLjvGV4gFXOJKRpg4vuliVzxmq8cpJJECQbMB+yA13p+
IzGgvafG8LoVnTIwOq2JzsiQFNirJbuSopSTvezV75apTjDd7e82LK7YsxVXNXsDJY3dSarJkf9r74bA5D/nJz216cAaN688YtPk7qo+Tu6N+XCEtyaEk2tAjr1YVtmU0Wgw7AeRMKjeh4GCSz30DrXmHyLUUfVQEwb4CX5N2y0TPlcAMEwmYsYlatMr8FqvZx51FWci5+t4s8usX5PuyMmRfuXUrrVUiH44/9/K5B+QSvdnB+3HR7LwixLKyNFM4wWCBJpRvEtu0mWhNo4TSSf9tJsjKkd8wxapl8PT1ojHacy7+HIONGokVEzUbv90Whe01VAdt62ehtuYgmFFHz7WyQxfm9zgx6OqRfofjm7ZcnDIxt/vJwQXjhtyVB1d8886W/KudkkauWtJzi9qs/qaYZiOeS85avazf0GsDRkwkH4IEvau/NcyVe9P5pUBruKhiHjkwB6B5BTs+8zieWSS9EynSDvzRMhzJXZwQxcmzjpR6E3IthHoWTpFvE8LZIBHai9P5VWk6fXH6tXS6F8YKmt8Q1YYV2iubVrB8ZoJgB1OpLioxboMujIuvjeOcnMVj11g8aRSTrg3qHJzQwwCK70nlknafr9h14ouPPpkybvzyY/88Pr00MePt8Te+9DYyvr12zZyEtiVVgV1LEv86c/kEqe/0tWYcsch2aNCIt4qK3x44MW9KP2vh4f79+wwm1V9NLz3dM3rJnHXdU7/DU/r3ypSS9xVEL1wNgOFlVlFuaAaR0JT6x8ZmT2k4fWmjCqh1PKP8ExvhdY2+6kczv6XG6RBHUZCQhULu+opcZzzD75gsUeROcnOszhf+S8m/zfxg0eJ7c6Zee+XNOS1W3O12ZuHRZ344cLLbOBxbMPz17bvm529Q7ORX8mJmiXfVK58uWv3Vgmnvrlgz6tVhLbekFrwyuupfT7fudnrX8vOfH2N2rQvsl5+Sy+itUHBCb9WoMeWNPPIwMsDXr80F6/EU4nN7Dhpq/Z+DppoHHdoNX5iFHvpe5oe35KeqIqS/ebdqzph2xEOOoXTulbVpU0V4C4yMDA2xeYmyAI5xNlk85WDJPAIolZkRZUeXyAbwYyS4dG1iXDLfeDm6K+vRXbVuvXDu4zPGZg1PgJtaMz8x3AJbNaNr8Nnc1JRheZ8VThnRbe7Yd+d+umrcoO5zR7/nyUaD23RdthuPHUz2p7Uv2EUJBN6CJmve20jOlJClrrVX16K0czn4SMzdw0dyvH3rfugBDGspl8D9GK5fiD+b8v+eQWB+hEHg5gwCT+65xxAIjFu95Qv9GQSRAAqrIrWCEybq0iiPlInYeBkwy6iYbPwW8538qJSlEu9dpXD43Vj7sJOTpUwcpA9nPa9qO0PQC0scJ5l9Aa+CFy1ixUH0iD86W/UC/ogy/laurAJWzCbDShRHPkZx3pXnAMEmxgGS0/04QHWewAEqK9MyshsB5AyekR0nit5/yXMqxbyrl4HW4hkoHnPacI2FFAn0tlrNDkhX1YsMPh+fn60kjdp0emJZ2TC04hPyLPryK/QeSZLTSSoq9/7Le5ONLw5Arsd37WFiPzIxB4xCuO+G+FlAQn2nREenr4LX+qHxtiMcrOK4e0O7wkswjSlpdGDjkZH8xgrU6LpLPQbkD/BeK8avN8lvgrf7xoSDDADB0F3XmSbqkd4gctC/GxM1SRW+Skbeni3Nzoga2gAmlZSUrVpVJo1pndfa68BvpuWl4c8BwXbSQ/4Hl8/nVYPN/vg6kUfdNosfY7BU1vvyamgYr8O3hPlS1ZzpyImOKSm+IjX5H/s2t04Na9h6iTeJFgS+R5nz3t1llo1hFV3kCZXraNHaenkcW5vXSQ/p73R3j4BsNZRp/39kX/HFs/h300J1tDBOTxwXuSU+9pjDqRsup5BxUlZa6Iyr7xzDuzbRUbvaL83JP9CPSvzGtyuuVv34x2OW4tBz+JeC+a9V3aKyj2Fc9TfGQN6pwgWvq6hBQ37iTKURFYLQ6Vb
x39b6lYaJPgeEcX8sQbUJ7oXjSS0uQvTuNIs22IaK3eZkC7PlD8uTFY1kxDsaGQOrStVp28lyVEC2z90rdWYVy6x6uXJ57tjJk946h9+1r0Ph+1DKfmQustEi5mJvVb0weWX4/Wvk0s1v2O6UXf2tEei5i4FmkAzrVENKqi97G1/Bji2E3UkgRgikW73Pxs6lMYj7XC35VWnLBDVMbwx1THnVpr0ygl/xIEKfDCp96uGG5nDyY41b5eT+6qNMuIY+Byt7zocrl15p3e781GtfexONf1x0Ynb3pT8tfi+jzaVF98ivnq0FS7duW7Z4u/zUqHUOHLYUu7eSpTNHj51Ovpmx98KklxdOHT0qF7UggUc/+Mv7R+7cvv3msoj8dUzetwLgBQY7z3ZLPNst0kVFIRH0jhGkU2vI0XbzVlS6vdUAZ6Oko/Lbe07ZVwZ/VJnlY6ArFi6b0TBMhZhYvqNW/Lv+UIoWsSsJfkE7CFKmiElhhTUMiE1hVYxG6rKlJtH7DCZ305AsliW9PeQLclb68cePdhS0TnCUfImao9Gbyde79nwcXnXtpg0NRZ1mGhFG9dMjCkOHkMXk4IAL5PSREqR8GHf3r4Cq/0p64BN0raIgV7VFx9Ah6nIrUXrrJbr9IsGFdxYUM+BB+imynGN4BcvERAhpjFozkZrCiekP195oT8JZV3dvbJ0YFtWhXZd9+/CBba0GOOKf3SdflfZVkl1HLatDxw2X5cLZu07YVwe9+xIAZn0ClWJDGjihIfSnaSG3z5OLq/g3xbpqeKjMfWnOWg7VnwEmHHFPrtxlqcwkk+JwGvX1u2b5Vx4sk5/XIhYr/31TVuYu8ls2OnXtJC/iPX1Vi5F3ozbXRt9A7fZvMr66kLzTev/PMsLIUVPIG4FQDUu1TGZZbxedk1Wzg1ZmB0XNF9v3GGSrz06EVIhRJ5tTrD9r1TcVo8OfvKrpLHNFry3p0nbdtW7UF/2Y/MOza0XBrj0Fy3ZzB3RZwOj55KOkZXsc1AlFSZWUx/qhx3T47l3Q6igNkQYMEdBTDdHtPhY6VItQcVrfHxpGoRE+ox/AToxYEmtnI7ZRQ2vAj9RXTs/ecvAc+vFmN12N5Z+Dl66+cT3E+/IlUuWQxVJLzvlTwuVVUBeyVCOvN4InUBEFP+yRiNcewNfdzqBz1cDvaBxrsfUTA7YFGqC9DU5RwldvLZVryYAdO0bKqw6tlquO61mBr2JX10mAqg+RHmiMnA6h0EgE3gUfQ7BtSNA3NGbv+lbJTL26Usr95L2qplGrWX29/FfJYAAIgGSt5o86RjQtYIw2UkdSkVnAWbdUYbVrND+A6LVs4ska/gzvBEZDmhRrkmTYsG7thp+nyt8H7d0bgkxcHuQv8M9KNQRATG2G81A4ikb0s0FGfMUq6PIy/yvJLrmklCR0Zt1WkltZrAzcG0S+R5YgQPCKfBV/oPwFQiBeDeRWnoN24RLKVANrs5jcEaZKwNc95mHuBH+wg/y4s6hnt859lL/MWb1mduc+vbuwGgP5ezROOUdHV0fFgcxZ9KMI6GgBK3wsgME1lRMwRz6E3Ya+EAg2aKJKdp67krQeyJJvGdUMI8rkD/IA2FLD8OL0KoWPjuscds8dNjwv71geOdyhZYuOHVomtlfmD575h/0vvTQooWP7Fzp1ZquZSPqgN+BpMEFzlYJJvioVwYlTlYcw+5FwU7QpwSRlslQCjfn5Nu3rQIZeTs/t3SI5tPPzQ19clPfUsEFdI+Y0Gzdo6MantWzRHamN8iU4oQ2fCj9Dh8IDogMwnwzvH8wkPVxA+G2196h5dYpsNg7GRGGOO7TJG9742eym9Runz52T6Xo6Kym66TPKvUmLbG1CM1oaJy63pVs6PgUYRsgVUjOlmrNoWjHo4EkpK7br8CZZD6MhNkwjfdJYk8+SkiQXzrxG/rVn8oW765Rqch0lkOsckyET0Z+rD/N8bTKbb9tgkExSjNRCaispmVqnk7aBLQLbBvY
NzAqUqeAGoky2y0kmXmbl1CVtKT+mxvd5eXT3Li9kdev5wuDkzi1auBom/rNzdlaXzpkjOrno3QaJyYC8I+Q7ZI1hBoTxWnYq0IAyueTQL2QamGDMMMqZdEoq0uisoeDTOncqk5w0Xzta7wzUo/OwHsa1G3v3QvKdDUpUb/eEFwe27htM5dz7NNlOrNV/gABfn1GjTsCVGgH3Pq1J+E+agLM8ynZcIK+Q4qAznLkDPd9ryx5bhQuUK9pjC2Hs2LZMXrLklmi2wQoBEKsGBAaJUVEUE8pAnz/EYgZO7EtORWETMqVj2QZr13mrl8wYexkQtJAdqIsBhM/R+3Iq8EaO+r6qBsOG8ZnSUZQtO7ouWLVqwehLgKABuY9awWEIgCjf5/yn5qwrxg+TPKPI/W7z3vjD6DHldJ7j5Jb4OJ1TPOwJYLmlPagDzy09KzvwIgPQx/eGsMf3ogxgUtSA3MSj4We+xi18NWSM6qhQa2B59Ls1qSqVmWXQjcMpDugjeizLJje7Lt3g+eOkm2359UQqtQiWYSeOk64yNJ1mnMN9FvFgUG2eUujtvCxn+LBpU0Zk5kjy4KmTMxsOnpIzBBBMgg04RjoMBparUqjpMyo1XYQZNsAaZUYhvILcQe4VOJ5MRwut6DWePVmPw7T3cbmVjMCtH1tTZGe87wfITe6sRJgQ6TDJs5I8tBIVAqJ6PEWaoMSBBIHsnfyr0tzI+eY4fGncFNYCmq1yKl6Fjys7JJqxA8CrwCpm3/iigY7P2ZhGS7E8i6LDUR8BKRrX5SBF4wQVdGxAAZuoASaYejfm5LDGvvq2I+H2aHuCXcrUUwnrspQNT+frmz+ywMnCgjaGWvpTPflFYGOxgNIZK9nJQamW8ynt3SlvLzY8pH0a0HCyR0b90e2ONdzPTvlL8o/WkD+P5i8BhbEmDam+/vEuiKfrclAH5osOmB97Uux7aQpx+lA1zls+FG6LtuFMNrEGCQzyrJPgk2ObgA1GV1AIlVc28+ax9RMoBkppRKz7vMyDoXCkp981ZhiMGu/k9T3uwIiHXVrtHI9DPjwuhV4YHscubpeSlBLbMMmNUlzK4E/o3zlylrxw5g79O4P6ocLTVdmoVfZdbPsTuUV6zpqFPx0n7V+/Zj1rpcwu9CaWvVVYrqpYs2bN+iNVD7Yw/d1FPVeJrlw0NILtqkuruncxzFqgn+oWsMb7iqJ3ovw5z2JNXpRJJECryqMBkxpr4x5EbIK+dD2qpre7QyTmIl+1i9NX7ULp0i6NOuVM4theTSdehdASGFcy6tZ57suFtgeXrnjQnPLvbIVl5ZUvnCkoWLyQRli6opijJ7H3qlJ65ggykN/JGyuK1q/EVB93V38bwHpHx0MqMKs3WB7Ir5+hh8Z81VzghqbQAlIgHY5C7cLU15ck+jeUEiIAsZ7GZqrHAV6ftDFpSq1gMifTuwLK6+Yy15TDeTame0zmGnEitiiciWyZKYbB+ETJpij28cmMpaY+E+Xrcun7TQMjbWshuSR+4QpLH7Wy57j0pcWyi9XldKY1ZAeU5HYb5cWo/6Sz09eWJXxF/jnjwBKycMWBmeTn+wlHXp9+ZgoatGTbF6hB2iHy0o408quUsaMZ+c0zNKRxdNVXgw2RjVDHTKfTKd1C90iD9efWkyj0ObvQm+wRdK+q/Bz7IzubqBcdzjNv4fr9cnKAVQ4CKCU8LqgHo3WC+m/rRQUoUs8NVsw1sAXoY3o1nPNgSsPZrkAFjFeKupluIoaU03QavaICiMsO7JY9Y3LISQ9a6kFtcl9EHrzjLTn97GnyJuo5bzaqGkmDj4sURD8+82V8wNv73HnOThrJ+xSfBxcsVu085hV1TjRNrkAH103BigcKVhxYJMy0N5wdmVWKpvY7Ojo6IVrK1FGvmH2P5lxJhx9BvxbWAslngSxQU0dv5ARxqR+ZLx/aMWOsbfbsX8kXBpX+BaHIf01YbJs85Y8HDWgeY4vjyHdvxG2NQg1RyNyl+ci
AoqO3u66eyF8KMrPWygmqPXUhClzQCI6J3QXFPsfB+kSf2qAR4ghdgjq1AeWjQQNTg5gGUqau9Ri3G/TpSPZ0pCkyJpJNvfbp2ApmaqbGolw1JlasaYjhBObIGle6PifLN+BZkwZsTdkjFvYCvjkwqai10yncBNldTiM9GGKRm64UW69EFEs7dKIdZy7SP1z34Dep374r4XP3J5LlqKPsnYzXZnj3oqH7vZW4+4ASsps1FJNaFI0o+nHh1KLEZkU/o6PJI4qGovuDmMQ0AZB+pSsXAWPFDV/c0uoKeBtilkMbcqnkZxzYVK3cEoclCNB8oI936KKzMlIz62ItudxsN49Noz1S6EEq/7at+Urz9ZafP0TffeH9Hv2Wv9nuPdkcW1v8TB4kSMWKpd/MEvWQ93wIHp+PJg4vORVQAghiqr+XI+gcomCF2BBNBBmsZkUDr2lExXqmghNl6mdVt8LntDhZUwwtoeLXv9lewdQhlM/Qwowgm6cisBOiFLPWmZIF9AbOFGGpkBR6YVXwdqOdXsypFnOKHIFXkV8O9J30I/07U0n/Tl2RpNE3yKWdFvx8jpqzgV7QUFI9XZ2+gV68H2NkQoFDfN31v6HWygnDVahTV9Rz/9o+cTsVay2DuAUAgQkSwt02O/O5HGDmtUMsK2nALNywAHWrcfUDpHhwyWpP4RbskZDxE4+UG0tWkLtHL3+ClBhvMi6PJT99cPECikST464A5hoq8SqUaJgspiLEhKmB1yizNJwiCJzB15jhUHhQNKP06wZs48/a6bMmdmpDxF63gu+jteBjalTbDa6KHDx9jf7hul8jC/ntn9TE9iEH0fObtu8uJJQVTb5D1pKlxfjO91f//AAtRfFvLJ9XjADBblwgfSMxD7yeLk/pYBAc8mM1f8MovrigiHe6GYkGww8MydHFVJpjd6it3FfGmTVR1cMg5sL4rvhgn21dJ88b3nPYO6Ctp/Qe739SF15VA7RePwFs/v9THxSepXosG4WL0v/fDiksQ1u+b9+1k1P3Refnzhr/0Ue4W1kZ7ZQy/HB5682JEyeOKKximV7ez0X6is7HAcN1QGeUWOIu7l/iMC3+rXCNgoNsYCZJqyLXhuZ6iJxTprzUYm7Pyw8eePbtQ2cOjkFNPcoo242JdGx0qH9461jr3xsBINgir0TrDK0gAELoGLVTJgTiTSe2kjwDDK36j8pZsqDXW8AYpfTwg2QHA6ToyE8O/xaSsoIeoZKWYsZdFWmknESKoD0A3ifFPJ4b7vBPotgFbrjNHsa5kGG2x1PE2Zf+99zwxzLDq3/CG+no4iFXHJb46xoaJXwu6+Z1ZD6sgq0gZfozwMFYwwDHIgPcj/qtRsazLMz/CQMcXf03DHDM/HZ8XLI/8osajn/zixr4Mb+oEWzw/0UNKkSxbkQjDrMR9504sZgsNaA528jCT8yo6YI9e8ZiA3Gg2PqAoJBanmAp7om/dyMFexfiuczeSFAit8VTDNNA4h07pold/msgsgxjH+NIYw6DyHhXtSMZuA8eiSWfKWpr1nj6GdAHRgJj8AcIqGEo9QCMeiZVXaOelG90GUVk7+FJQgdP3pu2YHTXjqOyO3cdPTCpgYsDfIZpx/7SOXtEty7DKcaX2LJBfGJydXXNr/xgA5g5UtQQQP4r589Gwtj/7hdsrsmIcjrYYYuMcnXrxmpoQeh1pviltErr+8ycvuk3baDHiJ6s6ze1dpe2b9e1/u5C/nbl41/QV7c/RRF4YxGeV9sDHG8kErL8lsl6gJPo/7fmgoD+SawHU12YANTREvJtgv8hMpESmD8Wzg52E8dM7EIAjypUbKpp8xoioER1tJ6kYj8bzcDTABTPJQ+EdlF793pQXfkGuS80jZJvFBUV6bqihkNPHSfmkU6R4UGYh3JiX0fOgzIwT0To7FTh4wrxBU/hfaOlvQ9O377NmqeSZg+ktKorUloR6lhSQk4Aqv6R9vuYqrSFSJguNEvQ7eBibw8haEM
+DF8FBWXqx2EWFi6A+0yKj3jH3F/0/zV2FeBx3Ep4dN7TnYOGMzc5s8PwHEOYmZMyM1zytYFXZmbm1hSnjD6XufUXfFRmZmau69snjeRZ7WkLHyS2/N9/o9nRrDSSZpRhYA6QvIA8IHW9uUA+/bQ3G8hrr+l8IA9fnerUwQ+25OqHL2bcdVUlhci4ULW0bxaBWWwMq4eYP9lvsl9UFKcMQB/JniA0jYZkfx+6ntBNsD2AeyA30eWEbofNbILFPcAx0Lyb0An4VXAXpHFnOz90lMj4KfFfSp9oY8vYdOsTA/gPaKzeJ65Qn4AIiGt1rFy0H52aJSsoiPYabD+WPef+LNqxTkBkmmgfqnQJ3WwGxMx7A6QdG30kOy8APcCHnkHoJrgiAJ3FTXSE0AnYJNAFaegcTzvuOwJ3KkozUsnu3kz8FMNKhrU0HQCh5Qb6SKgjNF2PSXKFdj8VaJRdo5vcaQHcUa7QLwn0PpEIoRPuGk92QvcRsseU7CprOlrOP7TldLMJtt615WCuc7TKWm3xK1ijRtNBimRZNBh9JHs3AF3uQzcSugk+D0JzE11J6Hb4mE2y0BWm3LyH0AlWIrgL0tA1Qi9jtF4w0zOO1vG6p8Np/JHPTMZQdht9JHuY0HSoIZnnQ9cTugk2BXAXcAPNuwmdgB+80UroIiF7hZYdsw2jNJO1NOcQP6VESPbV0mAe2XBKoGfrkfcigEbT4f7ksEwLrbkPDEAPN9EcNJpD0+EBWGYyf0HY9oRjYUf4sJtJigS0AEBBGnoM+6FjvNQJSbIHfaINfoS+1idGCC3W+z6xD34CPZho/FK075maJXO5iva52oNNRQ+GGUhRM/O1HjeTZuiAbjKOmrHRR7IdA9ClJpoDolGPewdgmcm8mZgTcBHpxkNXCd2M0v5LppQ6JCxHxwXIPutC1+dhJD6sJbkKINRgYI8scX2+S2K5wrpPC6zYl1dY9F3Vrs0cZQr9qEDPDm8idMLdWaAL0tB9GfkulUEQLWaFspj9HEuWPMWu8vqhvlfqpyOk871PJXpQZjD6SLZ3AHqwieaAaHw6hwZgfXJ8Qdj2Ax0LG/dhN5MUCbjGe5KErhAaGaE1glnKUO7ddC+3ktx07zaZg3Lb6CPZzoSmNVQy10RzQDT2cl+bGbVNzJuJOQGXeJITulBIXqYlxzxaKMteWpYSAJ/PIskJvVmjOSR2Ina8ByCxBYK91JyN8K9o/rIGtrIpkJtWlqHfG8bIDz9InmjN6ihizctOwzQWmSMDiLkFfmANFnN/H/MrihnR1wKzuIcLNFbqSi3FSl35UASHBGx10L4h6chXYkUe84lkmPPm7GfkxUpxik/X1co1bqPkx3oLIvoPATXgDUrxT+ib0Mhq7zjQrWerQl8bRY0vWd+LDgddspqtlyW/fk+EbsU85amlmKd8JDTAJX+Wmpz2Ant/GSp+GZqD+6JqJdAZcgr+RsLyoSKNYYZ5tHGUL315rZm46M/Tl6fposbLZl45MBKUzbzMU9A5Oq95pHp2UGJzT1/f6BTnrqvqi0V2UrNjHAVb2C4Q8+/3JOP6zY1ZxXHMzNXoWhozahVK7xDi3oW4m+CZIG5ucHNAbhztkwOYmclcRMyt7K4A5grHlLoLmRW6JEDqShYsdTN8xHa1uMv+QOrmlcxiLtfMWCMNZ9ZDNHMrm2nNkko0s9h7DA/nIaiGeYh+KuOFcK74ufMbmfIrHpdxCvGP/GntvU/H346H1na+Lf+EKcGWitbOp8Xf710a3ycu4vv7Suw7olX+s5e37uC/0bpjDVzGFkCuMRMnT0Jv+QdpRrBmT/JRdBkojljNHCkm5hZ4gs20mAf6mF9BZoU+F5jFXebjdoi7la0LWFvlOubcpAu5FXoSPntrboJVN29NLcXacSVwlOX99Gl0XzbgHOsKtDpsWaxDiFR0NeTLrtfH8xX5XvJeqjGX7g99Nefme+P9+p69jPpzNLzPOwxL0eENgdShmKO+CkbCcWC
fEMFXruwErRrwLgIec46SkJ3DcvAE9DBxGXbY08OEMQ32upNjnk3vrFLIYv8N7yoeqU3rU7Wdxr43iX3Gh3PXM6+X+7+W+tGX0j7VpRPaP3Z4PXV69e4OK/u6zExvH9qgktsHrMeb4TY207KZbB48923+J0u3GBrTWIEPvcVw7eO22Z6I1pCYwR6ZFyoftxNY88caH/NoYm6B79mukOtn7ijXowKZcQwt1OhTaAwRd0eNRBN3EXG3spsCpK5xDKlxDC3U6Fqw5R7RK3ePK2sSKm4QfottTLVR3y8nlk1sOOzql1DPcihKgE9shNbrtzTKqdYMRVBwXh6ZLtCLNHoQmw6ZICYfHTHF6D4AEDouMooiFe3uJDbHioJEVJ/dZoHeN/yZWhsguhxCVp8jTKHvF+hT+G/EvcadQp7UO1MU1pI0CfTB4fuRW6ErgfvQhQb6C4GeGSkm7hZ3FZtpcUc0+jmBHhp+GbkVejmAxa3RUJjalR0T7lDcwGHDR5mCozu1lB2KT3Cxat0usbcJvjMjDsnRCoMC4kJ9tc08IN5evwpPimhZESs0EiTLhWIevQArfy3G9iXsW2yvExZ5WqROsI9ST5CdwOo0O11iTMY4sstbB6HxaO3XK7Rb675irSNytCy39rjhMPZytLbIK9AiLxSW2g9H41Ldno3tG2TtQhx5Y3S8rJqNtWKbUT0nktfnx2HccZlGF7KrfJYyGFeoJIusi4jc6jtX43fu0uPKPP3Igu1uN7arOopJLYvEv+h0QZY/FoPM0qru5CFABkTuHM4VP3fGo3KqIP65Nx4dHRWzhLujYsYwOjpVlI7ufDvK1t2/T/SI6MnRjHX3Ph19WwKWRuXkQX5iaXSfqJw8SIpvBJTmDWYfWtmjPZu1BG0clATY3thzP43lcRTxO5L9yOp9HpWi1rTGTuEaW6H3CPA2MU+fsgaj4kZ9PoN6u6DHlbn+FQu212K7kqWeZGlmeazBehMMNP0KB1rvNx/PLEnyKZogsQ7J/ZS7bzgPuNyxMSKC31BEcA18yqZBri8iqGc5tBJ/kFbtaw6m2RZt/QzSWGSOZBFzC8tn4y3mch/zK8iMaGHBzOKO+7gbiHsjWxUQx6yO/iBut5n8LvFvhE8CYgjlmT90DNafwCqGaB/1+omfErDzUOzZR+g5tI+dFRruB/C9uyR/lraPW3pcWSFRcaMdHIB2sLLHlfn0kQXb3Z+xXclST7I0QxtrsGQZpO3jACHLfzkgC9rHy8ySJIcpLNY8ROYG3csLWaNleUN1LzHrPvZyF41eTr3UqfclOtPkbiTuJrg6iJsb3ByQG2chewQwM82cWiwrNSKzij22AkiO1GxZFUBxYPte7i8S3+MSXun7SNTrPj0u4Wk8BkjeDHey8Zbkw/9A8ua1LF1yiu6OFZJcjU++UX/jwfiNmT2uzP0v2ndV7bAZ28eKnhIee3QJgMSnFoeuNfDHwtfYjvua+DwbteTtAZ6kv5IcKw58wY8F+lZ2Zfg8isyXU6y9HZ5kE6w4fr5jRrm+oIhY+56O9daLMTOK/xUxr4EuikARc0euHOfE/CAxr9mb/A1lz8uRWJJ5ADG3wNdeBIp2d/N9zK8gs0KfD8zijvm4LyXuNraQTbf2HvI5RdoUP9+D+NvgY+hrRf5ijvY39B119B0b2Szc37D2TjqKvO9w+oVd+o6N8A76NCtuiZfL8H5h6nis21kKK8E7GbZD0LqLMjYVysQsnU6uPHnjX4F15KbV7s3mPG1BZRX3PO/063uXUEvzzSqfZVe8N3HdvmrZtN9KZt1BFdGzj5wJdK7wT9ItxcUv8az05eMf3PrTacfFBn9WDta4yfHfwy5L61Da1dTsjOe8NeFNxv1UWgJenDjIV7bCdVVlURyjE/WscjOrT5/z074X1qBA77KHRleSz6XcNMmBTKFxzwu5Jys0XBa058WN+DEHih83VREzxY9jJjPvJuYEdJF9evOlLIfsU1XjxDfoFP22OJt
kodUSzbCwbgO+W/bW6LKAmH0/fLdobv4LcbeyIwK4sx2Tuwu5FTozgDubGdyReuJuhptZg8U9kBvcHJAbvf90ZjHrp6NyAeKe96mqj6HtdpSI9kcx8xiO77M0+jhAbtPkk9O0RjBLXuQkgT5d6+9Tdoov6ie5R2huzOyE2j5XoxusnR16k2uLHUcWOys0IsBiY1HDYpF7D4Vm5wfMhQbY3LqXjwTMs/Jsbo0uDhoNJjfvJu4EzvEL0uQu9vaMNf9m4k/gfmSBT3YcEx2D/mCXeRb8GrCO6IPyW/s7An0B2GMuO9NbUU41VpTN7nz3VXtnyovk8hUoyVitm2tZvbUWztaSYDU1lGS5Rt9pr2goar5DapXcg6FzLDewkwF3clKr5K4G7Q7fAFsBtZJqdx5B/GRsv8l5BAD7H5Z1YrD/2B7ewT2AtPgwafFG5wE2x9JipqlFfgayKPQCyLK0mOXzieXE3Q4XsQmWT+znmE/oC/KJ7WWOD0saV5VCnTu4tI9yOBk6YkYO6T+vATQwJk/1yX9yM2I62U6W7xScw/tjGcj+HP+MlxW474Bf/7Qq7xW95UPrsL4XlmOozatlXnUv545HVSVRWVQ09SuLPPTo76t7i4o6z3WPwnKiA2RxUcbFObnfb9GVRdXc+r/YV4z8Qw1sZxtCc1kEZkKreyBEoXP0YB3BzwFwRuOzH4bPeLt7eupktKGlPhvawE7QNrTUZ0MbYBO235razZmD+KEaPwH6yEiowH+P+Pm6nQP8H+dLiG0AeAFVyIlBAzEUA1EjafSd9F8ApbIGcr3Zw/Ja6+t6vm/3rCXJZSo7SApPEpDdC7SinPG3dkFRYg6DhDaArzJJLFdQ1LOZGNtEcjIz2RQ2QAUqt626tEoiK/ZSR5J9xMzc9zDQItDftdSC+w9Alz7xTheekvJReeozPUxQQQjjcqJ/+cSLT+XVHgI57X3miegMwgkKrPUDInsISgAAAAEAAAACAADiktOWXw889QAbCAAAAAAAxPARLgAAAADQ206a+hv91QkwCHMAAAAJAAIAAAAAAAB4AWNgZGBgz/nHw8DA6flL+p8XpwFQBAUwzgEAcBwFBXgBjZQDsCXJEoa/qsrq897atu2xbdu2bXum79iztm3btm3bu72ZEbcjTow74o+vXZWZf2ZI6U3p4f4Ck9+V8/0S5ss3jJOpDI1vM0D+oI/rQz9/N3P84xwTRnKQLKCpW87BvgxH+wNZGhqzh74/SnWlqouqq6qMar1qtqqJariqt/ueue4GjpfdqS+9WSunMDc8RqPCqQyM5fXff3FFLMO4WI0rJFUN1utRTIw3c4U/mdtkIGWi6P2mXJH8rc9uVk1nbNwJ4xDd++VyH83lUU6Pp5HGfTmosD9VolBBnmVXeZK2/lCWh/ocp/x/aE/1cDbiJ+jzjvr9FFI5jc4yi25ShS7+MSrrve7Sn9T9QIn7IrtPdlH+wNmFwCIZqO8vpZPYdynd/C3Kw5Tn8H8ZwPzwPocngRPDbxwfnmAfZXt9p7r7ieuUe8YRzNLzRdJdc30pneLNytc51H3FCvmcjrq/vkkDOoUVrAgP0FeGMi1pqPevZLz/h5lSlx7+O2qqqvqZTJL5rA9fUMvvwwqt6Wi9PzFcpLqfvlrPNkkZmicVGKZ7qV2YmP0otelg+ZM7uVQeZFHyAE3leqbKMurpvzrJ2ayK6znY/ckGGcV6acYR/niOiIu4UJ8vK1xA/0Jteri/OT/O03zdkX0cp9JHlmssS0nlJ+b7kN0cHuaKUEIaBjLD8uivYYI/gTPCo0zyf9PVd2Qq/NPVffdP+VidC5NqLHXr6K46za3hKP8y/f1bVPYP6PmNLPR9GazqoLFV0hjLWu6SNhyaLOWy/43l8kIvKiQnkspUusU3OVSO4AQZzWGxPl1iM71ezuU+aJ2H6vkiKrt/OM9ylefS/hlWs0RrdK71hnk9dlGpZC6Yv/w52c/m2S1KfWweLpY/OXtffXy98gv
Vq7l/N5Z5t1jmXfPnFmWeVb8Wy/2ZPap1W618TnV37tWNZT4tlvnUZDHYvzemxWXrbZHau3F/ulm8to9t0frbemyL1BxZ/2m+btM4zlHeqjxb+bXyRc3nfu6H7C/llckabgtvUmJzwnxns8L6VZpygfpuhfIKZTujn8fZYnyGs20Ny8/GlIHZ3VYPy9PGtFlj/V7KVqXsZfPHZsA2aR6yOVHMR/i/1dvqsL20+WYzxjxidcvnnM2ajWk9bz1uMVh/599uzPxflkObszbr8vrnzzbhBRqTaTB75O/mNf4PGySVPAB4ATzBAxBbWQAAwNi2bfw4ebyr7UFt27ZtY1Dbtm3btu1Rd1ksVsN/J7O2sAF7GQdxTnIecBVcwG3NncBdzT3IfcT9ySvH68E7zCf8/vzbgv8ErQW3haWEtYUdhOOFm4QXRRnRJbFe3EV8RCKXVJQMljyXxqVlpL2lZ6QfZMVk/WTn5Q75YPltRTlFF8UmxSMlVk5Q7lF+UdlUGVUNVX/VLNU2dVo9QX1fU1SzRPNN20W7VftWR3VTdKv1Fn1T/XqD0dDDsNHoNHY0bjE+MeVNfU37TN/M2FzNPMl81SKztLBcs1LrHOt2WwPbeHvOPt++2n7CMcQxy3HJaXa2dD5w8VwVXT1dM1zn3Xx3ZXdtd1f3ePdSj8TT1rPcG/D28j7zLfEb/S38VwMgMC2wNsgOlg+OCF4NZUObw1XDg8KPI5UiW6KmaOvogei7mCtWItY+Ni52OPY9/n+8U3xN/H78NyNmtEyBqc30ZUYyU5mTzJuELBFOkESVxJVk1xQvpUqdSWfSqzMVMquyweyA7LMcPxfKTcjdy/3IB/Pd8g8LwQItzPt7GVCBbuAiNMLecBJcCvfAy/ANEiM9ciOAKqNmqD+ahlaiA+gm+oCl2IMhroJb4gF4Ol6FD+Nb+COREQ8BpCppRbqRQWQmWUMOkdvkI5VSD8W0Kv1TEDzACAEFAADNNWTbtvltZHPItm3btm3btn22hjPeGwbmgs3gJHgEfoIEmA9Whq1gJzgUzoab4ElUAB1CN9EHFI4ycQlcH3PcB4/HB/B1/BaH4HRSjNQlG2lJ2oBy2peOp8voXnqFvqbfaRzLy0qzRkyxAWwyW8UOsjPsOnvHfrEwlslL8Cq8ARe8Hx/GJ/Hl/A5/wb/waJFLFBLlRFNhRG8xTiwRu8Ul8VqEiHRZTFaS9SSTveU4uVTukZfkPflKfpNBMlUVVuVVbdVcEdVLDVIz1Xp1TN1Rn1WUzq0r6Ja6kz5tipo6hpheZoxZavaYy+aVCTQptpCtaaHtbkfZhXaHPW+f2f82xRV2tRxyPdxoN90tduvdbnfJvXQBLsmP8Qv9Wr/TH/UX/d0sCRMZsgAAAAABAAABnACPABYAVAAFAAEAAAAAAA4AAAIAAhQABgABeAFdjjN7AwAYhN/a3evuZTAlW2x7im3+/VyM5zPvgCtynHFyfsMJ97DOT3lUtcrP9vrne/kF3zyv80teca3zRxIUidGT7zGWxahQY0KbAkNSVORHNDTp8omRX/4lBok8VtRbZuaDLz9Hf+qMJX0s/ElmS/nVpC8raVpR1WNITdM2DfUqdBlRkf0RwIsdJyHi8j8rFnNKFSE1AAAAeAFjYGYAg/9ZDCkMWAAAKh8B0QB4AdvAo72BQZthEyMfkzbjJn5GILmd38pAVVqAgUObYTujh7WeogiQuZ0pwsNCA8xiDnI2URUDsVjifG20JUEsVjMdJUl+EIutMNbNSBrEYp9YHmOlDGJx1KUHWEqBWJwhrmZq4iAWV1mCt5ksiMXdnOIHUcdzc1NXsg2IxSsiyMvJBmLx2RipywiCHLNJgIsd6FgF19pMCZdNBkKMxZs2iACJABHGkk0NIKJAhLF0E78MUCxfhrEUAOkaMm8AAAA=) format('woff'); +} + +@font-face { + font-family: 'Roboto'; + font-style: 
normal; + font-weight: bold; + src: + local('Roboto Medium'), + url(data:application/x-font-woff;charset=utf-8;base64,d09GRgABAAAAAEbcABAAAAAAfQwAAQABAAAAAAAAAAAAAAAAAAAAAAAAAABHUE9TAAABbAAABOQAAAv2MtQEeUdTVUIAAAZQAAAAQQAAAFCyIrRQT1MvMgAABpQAAABXAAAAYLorAUBjbWFwAAAG7AAAAI8AAADEj/6wZGN2dCAAAAd8AAAAMAAAADAX3wLxZnBnbQAAB6wAAAE/AAABvC/mTqtnYXNwAAAI7AAAAAwAAAAMAAgAE2dseWYAAAj4AAA2eQAAYlxNsqlBaGVhZAAAP3QAAAA0AAAANve2KKdoaGVhAAA/qAAAAB8AAAAkDRcHFmhtdHgAAD/IAAACPAAAA3CPSUvWbG9jYQAAQgQAAAG6AAABusPVqwRtYXhwAABDwAAAACAAAAAgAwkC3m5hbWUAAEPgAAAAtAAAAU4XNjG1cG9zdAAARJQAAAF3AAACF7VLITZwcmVwAABGDAAAAM8AAAEuQJ9pDngBpJUDrCVbE0ZX9znX1ti2bdu2bU/w89nm1di2bdu2jXjqfWO7V1ajUru2Otk4QCD5qIRbqUqtRoT2aj+oDynwApjhwNN34fbsPKAPobrrDjggvbggAz21cOiHFyjoKeIpwkH3sHvRve4pxWVnojPdve7MdZY7e53zrq+bzL3r5nDzuTXcfm6iJ587Wa5U/lMuekp5hHv9Ge568okijyiFQ0F8CCSITGQhK9nITh7yUkDxQhSmKMUpQSlKU4bq1KExzWlBK9rwCZ/yGZ/zBV/yNd/wLd/xM7/yG7/zB3+SyFKWs4GNbGYLh/BSnBhKkI5SJCVR5iXs3j4iZGqZyX6nKNFUsq1UsSNUldVkDdnADtNIz8Z2mmZ2geZ2llbyE7X5VH4mP5dfyC/lCNUYKUfJ0XKMHCvHq8YEOVFOkpPlLNWeLefIuXKeXKg+FsnFcolcqr6Wy1XK36SxbpUOLWzxg/tsXJoSxlcWgw9FlVPcTlLCLlHKtpAovYruU/SyIptJlH6ay0K13Upva8e/rYNal2OcjWGB/Y2XYGIoR6SyjtOOaBQhXJEQRS4qEvag51P4ktuuUEzGyjgZLxNkAD4kI1AGk1Ets6lVSjaQjI1ys9wig6iicVaV1WQN2UiOlxPkRDlJTparpIfqRNGUGFpIH8IsgQiZWm6SW6VGpMxiMlbGyXiZID1ksBk0tasa+REcgrWbjua9k1ACbC+aMyG2RGONorqd1Ey3KvsMmr9WKUGrtEHZP2iV5miVZrPN5uFQXa21FgShu/bK9V7HCz4/+M4nBcnA9ltfW25z7ZKNs3G89bp3io+47JSdtbHvkX+Ct+dcfK7+Bdtpf+h+/o1trsvLQPQzsat2+pW5F3jvS5U0lhdi522PtbA9L6zn5efGkM/y3LsGAHbD/g22Tyv213N1GtoduwmSRzWG2go7BIS/cix/ameH20SbZFOJQFgyAFto4y3STgLhds2m2LIn+dtsB9i2JxWyA9hJ9fuNXeLF+uvtiB0DCWES6wxgl+WMN6zPWQDCnu6j/sUmGs+LuV1spo2wdRZrE4gkiiiLfNTvJRtgJ9RHpMZ/WqP4FIBQVAv5Qp3L2hFe3GM7/qa/5BWxg2/Iv/NsW7UG7Bzvdb0p326+Inb0PesfeLf56q+7BkDEK/LaAQBJXldHI9X96Q6+dVSX3m8mGhvy7ZdDbXSCE0YEqcn86BTP/eQUL0oxdIZTEp3iVKIyVahGTepRnwY0RCc6LWlF61ee4rHEEU8CiYxgJKMYzRjGMp4JTGQSk5nJLGYzh7nMYynLHp34m9CZz1YO4ZKfMOEQIRxSC4fMwiWL8JBVeMkmfMgtfMkj/Mgr/CkgvBQUARQVgRQTvhQXQZQQwZQUIZQSoZQWYVQS4VQWEV
QRkVQTUdQU0WjmujcQMTQUETQWSWguktJSJKOVSEprkZyvhYdv+A4ffhZefuVP3WPRaUeiCGUEYwlnvIhkApOJYqaIZhbziGGpSMoyEcFykZRNwmGrcDgkfHDkP4WQhQ3EQBDE9pmZ+m/pK4ovGh2DLW8Y/0wRrZ3sTlWy/Ut6kPnlj7St3vzVJ3/zxZ878t9iVrSeNZdng1ty+3Z0tRvzw/zamDuNWXr9V2Q8vEZPedSbe/UNmH3D1uu4Sr5k7uHPvuMCT5oZE7a0fYJ4AWNgZGBg4GKQY9BhYHRx8wlh4GBgYQCC///BMow5memJQDEGCA8oxwKmOYBYCESDxa4xMDH4MDACoScANIcG1QAAAHgBY2BmWcj4hYGVgYF1FqsxAwOjPIRmvsiQxsTAwADEUPCAgel9AINCNJCpAOK75+enAyne/385kv5eZWDgSGLSVmBgnO/PyMDAYsW6gUEBCJkA3C8QGAB4AWNgYGACYmYgFgGSjGCahWEDkNZgUACyOBh4GeoYTjCcZPjPaMgYzHSM6RbTHQURBSkFOQUlBSsFF4UShTVKQv//A3XwAnUsAKo8BVQZBFUprCChIANUaYlQ+f/r/8f/DzEI/T/4f8L/gr///r7+++rBlgcbH2x4sPbB9Ad9D+IfaNw7DHQLkQAAN6c0ewAAKgDDAJIAmACHAGgAjACqAAAAFf5gABUEOgAVBbAAFQSNABADIQALBhgAFQAAAAB4AV2OBc4bMRCF7f4UlCoohmyFE1sRQ0WB3ZTbcDxlJlEPUOaGzvJWuBHmODlEaaFsGJ5PD0ydR7RnHM5X5PLv7/Eu40R3bt7Q4EoI+7EFfkvjkAKvSY0dJbrYKXYHJk9iJmZn781EVzy6fQ+7xcB7jfszagiwoXns2ZGRaFLqd3if6JTGro/ZDTAz8gBPAkDgg1Ljq8aeOi+wU+qZvsErK4WmRSkphY1Nz2BjpSSRxv5vjZ5//vh4qPZAYb+mEQkJQ4NmCoxmszDLS7yazVKzPP3ON//mLmf/F5p/F7BTtF3+qhd0XuVlyi/kZV56CsnSiKrzQ2N7EiVpxBSO2hpxhWOeSyinzD+J2dCsm2yX3XUj7NPIrNnRne1TSiHvwcUn9zD7XSMPkVRofnIFu2KcY8xKrdmxna1F+gexEIitAAABAAIACAAC//8AD3gBfFcFfBu5sx5pyWkuyW5iO0md15yzzboUqilQZmZmTCllZpcZjvnKTGs3x8x851duj5mZIcob2fGL3T/499uJZyWP5ht9+kYBCncDkB2SCQIoUAImdB5m0iJHkKa2GR5xRHRECzqy2aD5sCuOd4aHiEy19DKTFBWXEF1za7rXTXb8jB/ytfDCX/2+AsC4HcRUOkRuCCIkQUE0roChBGtdXAs6Fu4IqkljoU0ljDEVDBo1WZVzLpE2aCTlT3oD+xYNj90KQLwTc3ZALmyMxk7BcCmYcz0AzDmUnBLJNLmoum1y32Q6OqTQZP5CKQqKAl/UecXxy3CThM1kNWipf4OumRo2U1RTDZupqpkeNi2qmRs2bWFTUc2csGkPm0Q1s8MmVU0HT1oX9Azd64w8bsHNH5seedBm6PTEh72O9PqcSOU/E63PkT4f9DnaJ/xd+bt/9zqy+MPyD8ndrJLcfT8p20P2snH82cNeup9V0lJSBvghMLm2QDTke6AFTIsiTkKQSTHEeejkccTZeUkcYLYaFEg9nCTVvCHMrcptMCNuKI/j4tbFbbBZ/RCC8hguw/B6fH6v22a323SPoefJNqs9Ex2rrNh0r2H4/W6r3d3SJ7hnrz1//tVTe08889OcCZWVM7adf/Pcg3vOfi7Sb7ZNnb2MrBg8p7Dba2cOX7Jee6fhjy+tvHnmqCFVJb1ePn3qzYznns1497K0c1kVAEgwqfZraYv0AqSAA5qCHypgEZilRWZ5UT2PYsgNdAxLlEcNYjwKajQGgw8Es+JcAwHH5qETLIgby1WDHhpXgAyPz93SbkOsep
7hjeL0eqNVIP9lTHKRzEmHdu0+dGjn7sPHunfq0LV7h47daMbhnXWvenbo0ql7x47dmLCSvrRSvDNw6uSa3oETJwLthg9r37v9iBHt/3lj9amTgT5rTpwMtBsxtGOfdiNGtPujmzivGwjQpvZr8WesjxPZUAYhMK1F/0qJXHRyLXWOAx0H50dxboQfxapphKtHGVUGHf1gc6PC6GkIo0NCsYGDIdUo5n9yHFb8Uz0qpyqHT8qpyOmZI4w2c1RTC1d7tc4anqdBGhkdmshNVo7GA2MF8+opFMrXcvAt55yfJNbVj8SKVhCJpBCfz+vGL5mK0yVjQRtLLX1+osicbALyzY/jkdK22by5e7c3z+x5acqYSaSkScEL3Xs8T9l3/Qc8NvUqY+SjNsv87OFG3YpXpZYUzytzDe7coy/ZsiQ4Yuzd/U688NSmCXd17sZub3v7oC2fjfhCGltW8VnjxjpZZy+dWjwpIJwormzTK79/iW/wBAAgqGEiyZKzQISGiQpWr1h4SISYUkm57FNqBQIBVkr3y8NAQ+3D36A4IWQV/JmZqJw2NT1T0Q3QAqTsQblg41NPbiqQH2Iv035kK206mGysZG3YMSs7xtrMDAyhTcjWSC4axqy4LiZRQdFdvnTNq1KX320HjVawZx6SCzc8/UKgUH6QtKPt2PKac4MDleRlMsxKBpFXpq4ZVBNmKyIxHbSvMAF1NBWyAQPW6z3nEIpfMhe2fL8kuIX8TClDEQQX6cwueUmTlNNpRPey/31uR/D0LuH14ccWkqFs//wTw9hv00gu+7IyEr8T3Cw2Ex+EZHAAktOEiPrIJO5s8hWcNqema06vU3PT02QFW/8NW0tWfSM432N9SfA9chuP5WOfkxnwHUgggyki+HwUXGw8M+65u8v3uexl0v7FyJpdaRIdRN8AAdJ5nYKQIGi4CB1U8zNNoUnPR3X1LjTb4EsQYnsMWACwJO6xk7e4bT/99GX0N7R2ndAo0jMzAOfHN02cnKkT94fv09bvr5QLAD8UpuJ51ev0rCK6SgOc3gCn19OKL9lADWokUbkS0ldBzwNNU8HdEjRXVGu0qPKIei288y5jBN59h9Cfl8yfv3jp/PmLaAn7hF0izUgO6U0cpAW7wD7NP3vy5Fk2o/rUyQeieM4C0DcRjwS+aHYSJiRhdokFkVRTjNUkvr1gffj25dM3f2ZXqEN85awnGncAgOhB3A1hQDSuhqG06+MGs+MEg0I21x4BImqiqcGk+kF0sY1xoc8M45pOL4mpgk13GVCnJSTTKXr+KSPXFgybNz6w4msqEctn537ZcSt7XKC7j1Bp9YE+E9bvXiU/S5K+eGzlJwfYcRkI9MM9smOuzWDV/+9pGmaYlnq9hLYFMjf0Fje13Izl5ntACdyDxkxTg0pcymnYlcImJDTWkK0ZcHQO3nrRBvWETcbdrEfVuA6VHa2IuhjrtnyGTjYeWzR1zsyJK7+iMpFevcjmTVuxkH176VX2rUy/Wls1d+3ilceELgtnTJs/d5R85OMrL40+Xdyiev7Ln15+Uh6/ZNmc5Qsj/CwFEIfj/jeANOgFJknoJonXwOrVZBeho02iBmkcTDlsEq4XIUsyjQo+3p84FpvOj7aLuIlTcynCvocf/qlml0xn/1WziWySrVR5nj1BOt4mXPlnKO1Lm0d5sxb3wsB8cmFylDcEVyexVFLRSeV8JAmXnJAllfClLUX8xpYRRhu0x6VoUYM5CS4WP7Qol4xGbc5ACRJ8Pr8v3WalWOW2FIsc2wbl3kECqXmlRfO5Xd/44pfPn2a/S/TjFRPnLl42d9J4O90m5J9jt9zYlFL2x6eX2A/nn5Us0xftWbf+UPvWQGEBYukSOQMu6B+nMDE0VnSsHA0kECeUCrz7ItigIy5ra0J7xQK3tGcqRoQsNh92U8w/JhEZmLktBoMe7bO7rLB0epebg632jH3uY/bP+ffYx6T9mVGBvNsWTF8WkF5wOh7Pcnz4lOJvxb
4//z77iJSSLGJH3RhW06N96dRHXn5ww7qD0f3pDCC6cX9ugKIoomQEkXw9VczkxNMLnBCUCoruT0/3oxKL7r/NJmk/p7m+evWfGuE78Vt2lRns9N13kx40+4fnAD8CjMf6NcP6ZYKOq42NrmfDJWy4Xj1P+cEsSLLxkhUklCwkOAq4oqQVOOpuIs64nGxq0JVQz7ij5o27pAixmy+WM/67KC2ZsngH++XyNfbLtqVTF/36ykt/vrFletWG9bNnbDTmjRwzc/aYUbPF4lnHCwofXvLa5cuvLXm4qMWx2c+eP//PkRkbN1TNWrWa/j1u+eJJExcvjpzFAYg3s44vfRL+t0nkS3xjCynWFA5OSSRLynVkyecXVH67ol5PpINovJ8YLr/dnoHXLW8MFxXW7i3ZMSj8I0l96SOSyi5/3XNvxxtbB5aMDNy4dsmE9UtPPfNIx46difLpNfI/7DL7kp1g37C3GjV6NCeL/NStbO2ps2c2bD4CALW10f4qDgYDNPymcCtU8R4uYw/H8WnY1+/HcReOEKGKyJDmBj5OcRwItIUhwnqhFpJw9xFg6CkFlTYXTfVqZdf/tfIcAE0d79/dG2EECYYQQBQCAgoialiVLVpbFypuAUXFWRzUvVBcrQv3nv11zxCpv9pqh6DW0Up3ta4uW6uWCra1So7/3b3wfBfR//rVcsl7+ZL73nffffs7HTFBR5D3WpvCDmUdIQb1I01myQTjoQl2MRpRl/r3hG4oVpCF83Vw+kdwei2j93o4WagRrjD/Nw7YgU6IrsgAfQGRcYCTLxUZur5kPuL/lYuuNgU1XoSa+ueEfPon+J1yrD1J7UCC+5VG3BHBHVHcEcUdlSGKO3nPyzABMdyNFOv48MTEyEXCyPp9KK85NAqGGrz6I7y65gckiwz3dgAI+xivtAIDOA3LqyxbS9V3By2ZYgWxj1KxdrMPUEhIZKJWxzrtdWqXG6lJNABmTO6TO6EgZ/pvgvDn0c+vb5z6WEvxzh24q2xeXq9VAwomDR8q2098/X7JuWGdhg3GY64xvHvgZPkLaR2wgixCI1vHWKJpbdGx3G7mDCO77O7d6Eeg+9T6IJEoXP9qW0dDeSvNbVsrcjvaUN5aC9pa0c2ZWrhMKvyhjOgmkGUyEsFkpRLVKsh0dyc2B5YQICBgIe/NBCIEGNktqHxMBISRCV+50v3qzz2L/GNX5i4ra+5/7cXJK/oKktUtLnpWmZsBf4zfwZ/i9d7NYU+YMLgiIyLr7Gi8AA/zaQ6/hPNgCdx2D3ukdEseEwlhjDkuaOZ8eO9b/PGA3n2za6oggAlxCaLjSGGvi6/CKXAHfhxvwhtxbhtLaVQsrIM2+DLywL6O+mUrO6a7GfRIcPf8hNHZAIBE7VQd8ASDAWfec3ESdiGTC5nSGsiiwiLUtMnjuEOk1kzFcI9JHoR5kz0Y+SwCsXdhGH0VKhzHp/+FzFeRz9+O7fCtL2Q4AL8u2e72RcFosiLP9wIgHmY+hxmEgGJg84/lVDxnGtpH+FMziw5T/GGx/Sx9V+NPbS1/uvSGcm/t5vGnTEK3rUG9y6yEYO1+tfpYOon3TSpILhmHhztfw/bCn2qhobiwdDW+fQN/CjstfKZ4Dj4A9dOWrFx2S7KdOD56V0TLD0s++Qptwe2eLpq+6O1Jo56aACCYSGT3GbIfW4Kuj9KLgIabbN50LDdy1C0P5CSL2U+190OAThfGG/zHkIjP1Tfgj2ByPUSwrYiu7925+a0D27bugj/KF/F1OBh6QhP0gEPxrZ/ljc/fsONrFTee28R4g67DL2Qd3IERJIOHLwGln4cGSUJdTxdyhgDi1AKL4NMYAdkLvyXzDscv4Os/X3r77Nm3JRt+Ef9xEdfgl8Wb97668d7lQzcAZDjMIDh4glxAaHWfDV1JZj/rSS1tOuz1hHmUcIAjHG+MklgeL6F9LCbnn+jtWIJ+rI8SzjpaowWoDFuPSrZKXAiAE5+ZjCY9wHwiifwfvm
XsI9wJMhnuBBn3B5CRXWYPc85tcJTWCd84gtBCVOTYSOfNYvNOJnxzgfBNCMgDJG7zSAeR2NXUTWzOuYmcC5VObFq7NxloMKYVZwDIYliIk59EGoTQ8FMi1WHihc7472r8D34dZmIIYUsBXXXbuXHroZP7iteG4MvI91jOCtgbusEO5K+347Q8e+MPb+JPbT/Gt4ZtDjppKBnYmi4D3IJyT8WxGL/UbqKsmPH2vW7kQdLd4LSKMre9bogIAvLe7u0GiyvOul0mNypGuE2h989SwFg6lJAPH3RNyQJYyWiVDLWO6XV1aHWtQn/HIrSI4vwGGfYxf74lFwHn0WS/ZYX76uoIKFu35IbrwlVyYQCxLpa96kTTx3OvJq5zuRfv5Pnw7hyqq8P1Z75rABK6Pm/yyAWS7d6fZ34//7k8f/ry4ka6xjKbeygnyTXR9CbFOhNBTIUiJtZlQleZiHWo4RgPKCvqPoxRivhqEFpQ55fr6lbBkzDE8TtKxt+gmY6VhGRb0QTHkw6dul8oThJo+wjtwodgwulWsMINaHf91LqjZPMpvyPTOJQPmKOhI8f8PFG13EQvVGfduUdgdUUc7AqJkgqDxNrKgaMhs+eobTNFT+700efrUV5FO30KebG5Uc8EWtlONUbCMKgzknfwPPyXDJ+HyXX+Mu77L9xf9q8jy7JPHHm3L/wDzYL3tomF0LEaU3YHPO9P/D/xPpFcNlR9sDfKQ0VIyDvYAkWjZCRQzAmOFb5urd0QeRq30fSlk1sX8kKZEurossFEhcHnyoTDl8u1YiS69x3B9zwSWwMExpGYerP/TAzKwmQIe+FjUFIzXI7/xHfxIdgdStAT9q2tfHHfu+/uf+kjNJB8sB+OIDdl6AFH4n34L3Twt98O4jvvXP/tEFB10nkWhzCCLoBffFVBMRMFCoqJUu7Jo9qcQ5WQhel6UVXuFrihDj12C/rgmlv4Xfj4imeeWYHfRW0c30q2f05/8nfluilTqH6k9PKT+hJ6GYEFpCu4GMj0BlevUyth7YJ7K4qXwVBu5hBhkW1IDMiHUy53QO1z+HbC7IyHkG/FrwOur4fAz/Q/oGEDoWEgCAODHkFDdtGcXDTnCMq5zh4tAL0r8H4kpavGhqLpIBNRJVTz83QOvA09Zkyd91RIxN025kVT8WEYuGH50hX4HMp1PC/ZLpyZ9q+OkeWL52TMDTFb1nadMXVp5dSnJy9Q9tJwohNfko6pURM+HNWSXLSkiJtbsnyG2TXfxfFwS0N5+AN5LeLfk+CaalbRx3ANsgkVK167jf+BYVf/gGESurZtzbKynQeu38YXb/6EX5bQb+9sXLEFzhw+vX3GF6/ZfsL4bXnqqum5OZM7pl96/eA3tz6Xly0pAhAEAyCWMjs8lpcL/M4jdosEtVlJxXhgirkUP1GHnxBHE/PJKN6sVGi0nNDoFpObCZzc5HQCL2Jc1JAPCxfF+1idfOgj3sJVDXfxqbrX12+xS7b6DrXYAcVbQnV9h+07dmwXqum83gBIErOT0h6ti1Svgj5NhjuVyQPgGCjm2X0hcx7M1kRooc4DKgqUA2AuFBx3fnH8AwW4oHC0GH+3L9MPbQCQf2TPuZTjaH4+bo9y+oEPGxL9IFfbfYkSzHAPk61ylpwjE4wKyA1qmgtMS6QQLWHPpkMRHYZTpdFCH61HFGtTIrRCc6KRuj30nxUBCMOOwggIr9bgFy/iizK+cAm/VAOXIklse+9LnYfY9m5f0XTvOnueTgCIvzM9MZCzvDVYu64bu9CRCx3brjqoeDokgUJH8jwTKfoEd3emyyzq/2glwTUEZ8DP8AVcRf5dgafIVSthCwp0tHeEojDHRXQJfU7X1YvgdY3g5QZ6cnhpZn/AMhdEigqdGRClC7oCqqHAaIAYNrITG6pOLWguHAm9sa4We0NvdANV1WdjiPTC83TuIWTuaYynHgfcdA+1JewiQCzqxW0bu7vEwj/M0IinwRkTnIPu3PsFfeeIFu
4ePbpNHFi5Qdk/S/FhFCSvBTrQmuaUyJS8Jc8JFaXYgdrxKOiFF/B4uE2q/ueVI7rPld8ykZxQQWNOCMVqtyP5KmUV0w008gZRM18weD0Rhy865yaANFUl8m6WjsuY0hgTKbXQ00qBl16S195pf0QeDCCIR+eEeMWP421XpZaC+eZCZJgOCp/C6Ndg1Ccv6GU9Ooe+cbSFuxMSGC5CQ6awjXnnQZr99YDpJtEo17b6ScLmDz5g3+srHkZm6TgQWX5HiRfY3yJDRTCIBYg47TQ3EguI536ZvstWkibUTqdDOh28yXA/rXTQWwwWY0Uhj6GeaEHmKuxAUC8ehqKsxkeh2AeEgGiwWcE2gGAboOcEjmscwUumaSUSSa34wOusF7ELa7zgtAz3Eq8yr71eb3mJxRXZXiO8iEdB7xAOrvFq8ELFtgBOj9h9A2RmQvMxZC8X7WKJUKJJLHRs5YNnVN+bw2mwVVE5gqeXj9DpX4WvvH3n+yNj8nJG/QZ1dZVHfm3u67iSu9H/o4mz+7XtE9lr3Jvbdr81YuDIvunyouMfVuDgrHnJb+Ym75vQPe1JgMAiQpME2R/4gGAwUKMtfbWiT8+rG16i0GSJiTelgngLhgXJdNQ9YHkGH0Vr6nz8lGBEwsWThZs7+Z+p67Q67/TFuukL+xWFBE/OWVgM/7mJL/fPXi37O17q1oPIn/pXqp/IwJ0zu5dvpTzUj/hQf4p91JiJYsfrtbKdZ0SWuhGqaWbNl47lZtcYt9XsR7Q4IgYJjeapCp5GttOHzr2AJNzwdk1DQ01lnYguzsh/trj4jQnZ8rYLMO5G2HUY/+Nb8tD5J7aEbT9G+S2H0FbgacuI5qslp57XMbyF+N/R1mhgQUdaSBWpROetTo9c8c9zLp0csspad8Y/bkPBiUt1Ty/oPSk09Kke82eiZlCAqd27oJx/fl3eKxuG3thi75IKv03J+uxltleGEtreEbOBH8E9T4O73nV7BAEdZeygWHtZEPGuS4LKSMkHZ1u7BNV0LmSXQgEhNzCTBJTJoqM8wQKmAuEQs4Xmn/pexTXQ+8x31xx5SF41b9TqzD6pp/YPm94MwTcmmGDMjTY3YCLEf18ukxY/3yFmb0IPYV/ZZClgXCmAIAoAdF6OAWYwABCWeJDuRnJhdH0qSmjIJwC9ubggrebyI0KSVbDRzapJptHE5dkXXqi0hT0RE+DbMSg7+8IFYXnFwgNHPT0Oi/KwAQsr6udSGg/APUU3xr/RYAxwRc2F4HpyofdwXgSSi0CKp54PAwby4oU8RZsm2CVRiSCw7A2LuzXFOgN+OFmw0ep/CuOb2f/uEZeyvvfSudZVw078UDdrQZ9JltBJPRfMIVyEYFpOnzX3jn/2U0z4B8Fh02ZMycwi3LT5QGYqPJ+c9flLAAJilot6sg+MVD+rvgO/CzihojXInKuh50RKgiIQw3zY9lR82KkJO/Nf/6hu7Nju08Lr6oQ3ew0494OjCG1eVJwcV/8rmZ7x9ToA4BJywXI2Gq2nd/VxkMEmqbVesraew1m2uISWLYqdoftXAKAGG+4J15Lf9SZPmcFJI43RQ5aP2xlEDvmoczRX56C2taxZHx+WMFn77outO4c08+lkSut+k858b8WBSjf3o5Ju4DBxDkMDQLAYADGF4KGn/K5OzFVO6h8d63FDSqznvw/zwCtFtbWF0Ae2wjuJbXEVnsORsn/9UriHpBTszLZR6c3Hx3ybjo8RkrJ1YvkvIM8geyMcjNY8h15r53Kblhej/DZRLsLIRRgz4vk9E0xtHTPjKLMLX/nyPAbzveL3TZi4LaLT85P/daRuxIg+T/mjuoL8HuNakeVY03vAyJHDxl7+0TEdrVk5dUB3bz8PRxZas2zGY3H1V8XOynMtBED0FPvQvcA9F/covAK7n5yjFyIXDlRR5xHNbRa/v/CVI3WF47pPbU1w25WT98k5xxD04txx6Yn1NQwZRT/FEVx8QBhIcsFGTR5TDerHW7
bBfD1eIpnfTJ15HWHaSFrPaCZsm0jj+ZEEIx1RQ0uX/3xt6bJlS3/5ddnSurTUJSXpGRnpi0vS01DkrZ07d+6oNd3eQXzEuj1jRo8es8e0c0xhYeEOhuMiPJLiqNWhbIk5TuCkhwdvrPxP7RPK1+Ym7ZO4S8dz11rrPvGP21jw8eXaBfN7TQwJmdhn/jz4zw18qUuGo046/0yvvrgSO178IrMzNj+W+u/NjL54pFDvxL3/o+S7qvI9XLj4kYir0pyg/hDln7/OGnSsrtMzg5ny7zEuNHR890bl3+fJJXcjkJyaRpX/weQkeCch9auXnXsPvUPw9gbdAC82VEWkd42p6g022CjAKkbAKTSA6g71itCIdMpo5y5DO8d3HxFYd8nQdvEAvwiDMEJMSXQYxM67c/J1EoDUThfOkvkjQZnGItW7xm8EFr+pGCpMEIjZPVNYTl6U6qGKF5sdbEbu6ZsFkRf7oGbEWTA1g9NYcIenqJmL9dhCq+1DQ4kTIoQaQ1Fe09EfZ12Ha/SHJYETrYxp0JWRS46euHr4+DUS+hk7dEju4GVnjt069sVtGf0gLsrNHwsjknoEtd1a+syHlevkrJHZjz2WFRi1femGg9+ulvMHPaHICnPDdbRAygRm0E/jU1M6qIUsetcINl/YRG1cN+6BaXWTL5V4PtRMUfjFrLgcVKv5wDePHu3cwTfCJzB4UPvl2154QcrE/1Q4Xs16TCfbfYy7X0aDKqBOwW8ekR8eYmcmy3iGVrU37zloTa6m9Hq4ExGrEzGqaYVQ666xb1bV5uYNmRVa9+WeQXmXfkMrHLPWFqenCM3uHQcQhAAg/EnwcAddeCnGMS/v4iESE0etEalOtqIslINICfNI5IwrKdEZK7zTXDZ+cw8v+gIvvAcnDxmCztw73ijHwwGQqsmFASzmrAiNNqUXTdsBD5j5Is07sMBWhiedOQvSvINEyw6IL27vRWtW8nRFOsLTQbp2OppBJ7ds0FkqxxAWInU0nW40G61ikvzKNfztiasI/nQCf3vtDfn7cpgEBXjvOPrRw8PRUuzs8IDobwCBBQDhJnkOT1DM8RgnXR8VT3LXeTir9kC1PZy65WPp4EuHAWSgnwjVdCSRpmgZ5h3sIQ+TJ8rMTzdSM0IQ6IjEj6EZvw7z8Y3PPsO/wXzy3hedgE87rjku0speFIbMCu0NuKdQT3A2gWGcVNVUOel5VtNwAhWxRkrug0pIkSz8KEjQdON5kfIBwU7W2GGJNN74i798E3rgjOhdZa26hbTw6qDvkh3QBs+C7tD+FLp9L3TaPr0biTgMSx4lxgBIdBYQqihv8nvkPxKbKiWFSetRqOOa0OPo0b3om6odCn2S8Da0Xk4FrUBbQMtjQCxNiWa70doHMnC1gmadmyKjnVH4eJaHZzLBpInSo4LKF0aMGjXihcoOo/oNGjx4UL9ReFviH6+dHj/dPn3i6ddqEldbXp5/evz+mNj9Y0/Pf9lC8XgT18KBD611htTiG/jSS7hWfl/BuwXBe4YG71axNj+Ctx/FmwxaWW3Xmf0Y3uYEBV+GPlspiq/VFKqg36IgZ2he3tCcgg5HX8wfMyb/xaPfUTwn7GsXvX8SxXN1Ys1rpyeShxh/+rU/EhU8ZsAl4gUhFgSARGAzECSaqly2GfjqJxb7JTdtAXRHKva7oocjFffQaU1csC0bvD4ncUj7lAGvvr5i0Na+CYNikweh37d+mdm9fbtxT/ht+SSra4eooh6Kv1KGV8JSsTPzV6IYFVUxpqc6EFC7nBb1y5oKa01zVSn1UvBKoQrC60puxFNokCJAGJio8cU4ueUaM/GkG5iObmz0uO+xEG2ivTBV0zGQjuUtm4isKF0/LLjCuoL4+MqTQ+deQsIH6z/+6PTpjz7ecVBAlxoDLNLiMy2v/xoMIz8Pq4ZtQq583/KbLVJjoAUS7QjEiSTfEwoKwH0R4JpG0O4m8ih2i8SqZC2x2gwVLZGw0AIbe4CvhX7s62
otmglX0S1oJYwXSSgcyRsDZrIvf5FiotBX9REesbHSczvdf608+5OIrhcNHDTKHS5DQ4r7b+t89KhXef7cyt/P3jxnlycULpn5e6Wy3nkNP0vZ4i1WsdoeECXPB1Uj+QLUmAe1Z6QuUik9TYxMdNpbiWa6jZVEoi+xGZvHxxGTF4mpvQ+NKXyn5+I1Kzpak+LXrVnbw1Yw0t5z/dpN1iRr7Kq19bNrXnu1pubV12ompXbJTF267tleB0YVHsreuG59Ykpq0qb1W/v8e0xBec8169G8QxhDdOgdCBqUPRQIgPg+2ft+YKqyJn7kEfy4TGIzrUFJVYm3UYi2Az3d2OQ9DfWSwWZk7Gfk61bkaqYa6VjeTHPfw5k0sJiUf6SlTvkHLegpmAW98dPQF++Go/HuOrwTFpK/YDwNGoQOaJEjofLpyps3yYBOsbV4hsivIqW/ka4F4KuM7FDZezDWLsmAvpNiK7ylYAnRsnCy/ajF+8zPP/+Ma4UW9T8LH6O/AAK5uLW4mvCqldjWs1hni+qb0t80u4c5c5Kp2tywOVWtjHexYe0dwpSuLK5Nyt4ysQO9G0Z788hYHt1kpTJXru5s1yMjTW6KvHkbzgLTyntzAgUXVw/tn9UV1/zyA/6UGLmvzp27evl7tT8P7p/VBRqv/g71JMe5ekHp0rlVt392fBLVJzwxfv7R+MdDElOegSfyVkZ1Wlnw1vFT52U4d/Lo3r2HJWW8++aw1e06rSp45dPLJ+XC5YW9Bw2K63KonUdAM9PAzkOHJxpMnn4DH+tboOyT58WfhDnOtWnFMjCwmppROrVc1VtHDH5E+YHsUon8CXNqa3HQrVviT2fOnKEZi8GkruEHqQq0JPomHsxQ+DSGLEVMI2tayYWV7juLeJ/HYkjht6hR15ZISmox1u4ZaVFaRu0GT5G8KzeKfIWeqFkgkXaTskI9ZvO6+BTO6vtwpV2H9e4ISvKfjeIgJNp27ztyZN/uchFtGjYsv7Awf9hQhzcc/OdtOBi/cvsv/OpcuAe2gZFwDy7A5/G3eBQaIG/d/eVbs974eu9mOX/gymmzn342Z+QyfAdvhROgG9TBcXg7yVknQxvui4/hKtwH2mkfAqoQfFiNWTR4i1Zf30+dUJ4tkWnqhg4hZKCKCFSz9IemXlYvs4phfaz9sp4UZQXrY/WouCJdn61HJJdyRn9Bf0NfrxfzKjz1LfSImI/6gMZ0iforzMmMaFzfDPcPI6ojrkT8EUG+BSIMEWjaQeVamHaQXodECMWEvk1lVCKbzqigkW4egmVKn1mlrzz3bPJjXZ54Acqvrl6+W98Mr7BOav5Mj5zO6KgpNjA2de7EKbOtaZlxsV7yqNK1y/Fx65Co0s5hEzLaR8coteujwAxhlrAJRIDqvy4BHaiGXRsuAQhK4EzhqBAOJNCccm25IPBZQponO/qxY5mQBWdC8TX2W86+NCTTqlwgqnzrCcygE0gGa/jMNl9j4i1y/q5Jw4MB3ibW8BtbUR1wJYDk3FqYvFlzEVmlFiTdZg1oQS+tseX+mm+F+luVNmFbdDWpvKZNSJ1FbVhCw6dGDf8qpR9+TZV+RDZ2JQ12Zdm5WoaGh7fCgK1vpianJeo8drqLWb32lHXN71NQis7xPAtTXHj6DfyW0H9ZSfKw4KCneia1zTQZTP2iErp3XZ6a+ERnpq9WSM2FfCZPDLSLievSpGuS72iLvpGa76Gyp0SwoVXSMUb/ni60d1flz1l3wugfuJ91RySF6U52ByBD08vBtwwrkQRNF1HJzqJJ27dPKtq56sk4a/fu1rgnxXcm7907efKOHZPjuz+ekNCjB5OJIxquCXWSB8HLG3SluoWL4hHF0WQXpV3ycle0l82LU6Z8eyUkI9pFl+IbvAOO/QaG1x8RsoSVJ/AMuOoEXHT3chWl41NoJ/pKOgECwRjXrgKVMm8B2ssAYLGS1Z1C34XQevFAzV5H1do2A/SQTj6CFWyqy4CkjtBXjv2wY0
Yba0JqxttIfn39qp0FsxcjmI92rocg4fG27ZJSOsjj1pfO6DdzwmQZQDAKlaHrJCcdBT7URBoJ7uUy0liItFCCjoHqA10OJE/wViD1UwLJAwXTyyl0KKNDOh1q6AfZdGhQgOkzk2+Uh2qkZFQosyiiyP6LgsUHY6PSo7KjBPKVKMJK3lHBUURmXo6qiSIC8gNyq7ytZlv6to2i3w00KAHtTk0QRY1SaRsB4+H+zNTMtPh0SqPSza93T328Z8XmFYdk9Ha31Ixe3bvNE5+O7xAZ3y5UHjV71uTE4QH+I7pOnT9nqhxtjYtJSlyi2HuzST7/cWc+n+rCdJHab3RooEO2SLP5IqULeVdBE/VE3rxFPxpBB286XCYf2cD9fD6gpQACaxQw05Q+9EK45oh0XMb1bM4NJDYczOIAOeAh4XMuDuDhEizjC328XZtzNEEopkJYjBguHVMweErLusu6mFk9U0dH1JJQyqaXZqemCM3vHR8Un9AiCKdJ5xWapAEgTGU1ia01cdQHGhUQUFxwstVCAW2vsvigBTnXsAMK1+DjyA0Kn52F0t2+7Df3of5wg9BFkVNC7H1yKXYO3FBbi/r/ocxfhDPhSQLpDTowf9pNZdipLAwgcnHCZqLWl3AyS6RiGibCNM+MQa/u1qX17NY/REjw7N937Jxn28W0ay2tUuYajLbDLUQmSqAH3wf8P9j3XHewTeC82LD4cLjlwxKYjrajki1mJudmEXuknbMeNQOQFeREsL3Eg9ojdAghA033uB7p8D89p2HW4T17jhzevffIW0MG9h8yNGfAYHHmpvfe2zR986FDmweOGzdwes748TlMR08EW4VVAjE8wGd+AOjAZ3Aqu28DQLpMdHUkOA+Gom3k9XPoD4heAt+gdwEABo5aBB/lOzKQqhhsOHBr/C75zjkhmn6Hr2pk3ykm39klnWDfOcu+840wi3XNfQsMaCf9juposO8ABEbimcIXYmfWA9YDEEl9v/NL///p/JJZl5eye6xO+zaOdYPRQ03Q6yh9ct9h40f3m45+E+CfH35xfcO0pGDS+oV2r5ubm/1sTsGkXNb6dZi0fnUcPhjuvsZsKqUnSReKIkBr9mRZ0APmAndwwEsSxWjySCqMRYWZCT+CwymMwRWmuwpTBV6BQylMM1niYUarMMfB6/ApCuMtu/yOlwozESyHecCbzEVhaCzIi4hiLe5lKuwxmAEPUFiTRGFNylEwzLdp+AsA3WDJxnLJW7iqz0c1PwiiMxRkHyHAPJdOFrsnkJ2+CSCtMNpQpw3wLrTAl2vINGVgL6LueAodcslAO+gF8o/aB0b2By0k/Dy4fqE39ngHXyJ2wRXHXB/U2vGTL9p69yac00JS2rmO4fHHcAIchxZAoOwbnEr7nghdIgDdN3PhkYZ6cp/197C1bqOsNahqXGuZ0V+F6a7CVIESZR0NsguMlwozEQxvXCPZZY0avqC9HGzOdsqcDUuUOSUJNf7eGwCghTqLCjMTJCn85abCNJwjMHMZXgpMVUOagpebrMK8T2A2MrwUmIkNgQpeDIbWKUmN/ABaKzWzTN7Nf8QpC3ZBAk4WuExYoOKscFkgWjZdoL1PAlXFArUjhGABFZcjQSP9q12LdCSuL4haW4GN1S5q05bRonZtERvxyPbt91u3WmEHa966BAW0/lU0Q23hQutxR9bChfswmit9D2yfdXTus98b95nOSSul/0CXSGA6Ofe9H5xGYYIkDx4mQYWZCT+BUylMsCtMrgpTRaT0ZArTSnaBma3CHAdfwMXsd1xhQlWYieANWEzXLoTC2EIMtpbOtYOgN/hauCEuB55ExgYQx8K/QoBG2lEismMPdGykUSsjhIkQmiHUQdgbpuCqTTAZpmzCVWzAx+BTsAvssgW/zwb8/haYiT+gcwgEn/2kP+N3EADCCRUH8B0HfPywPR/ADtWGjNqH0sBbcGh7+tJWeYlmN5XWDVbER+ND1LdjiWdqJEDiyJmhEum2EF
MhEvppGjr6b0wftKk0bwztSih47cn+m5b0GVjfM8wiwzux07vtexdV+ptk7BOZH9/Y59G69YaLA26XKW0KJAp5acD3i/Dd7BWxUBjWpt1vB1OLomD9wRYtfjvE+IfVsbO1SHLyhlnZs0bJna2XCmNRYWbCT5U96+cK012FqSJ6dCiDkV1gvFSYieBNZc8yGJsfkZSqvGf10GzOFOec65Q5vSSFrwECmwjMQtaXZQLZfBU+Z5raIfBwRhrdPegOp64d5OpAbO6urpuPVWlfoQU7Rh+ntQ9X/FULvfGt2r/q6v5aQf6TbPjXusqqWvwleReOA1eNHb+G8e0z5Fl3ysEgEgzSSBxfrhrFtbVGLzUaB/4avgrxkZh7SZqqXZrrGt1dky8wcQVPccQMbvRf4Nzav069+t1M2PX8sf6vRHRsOy8tLx+/t3BE+vApYrcrd//9xrSzaV3xTysrKkKDjgW0yeneC5rWD/y8Z9+CTcuUtWB1v9IVshZdnbpkMQika9FODmBrocJcVmFmwiQQQGFiXWBkyQkjg6oUM4Vor1MgwH0YiwpzPC2K/coDMNJpFWaifwvKRR0oDD1eK6ZaO19vFadj4DMwjULGyxQy3mBLdsoZAcQ1XJeXin1Ae/AY6AJOc9XNmkO9Hl3qLLBSZ3s6CKYrlh5bUZJelk4rntOJ3shOH5GOpim3iitq0hvIC1GeTRc624PYiy2dO6GGapk2fLdtrOaSRKut1bTztDNfH/rwCB5LcPB1o5p4HmwsIRWvLj2Tlfz15opjt375NG9Q3qRrSK49Oem1pPSXx3x9wzFEEFevGrWw35OPnaqflrWh7ZmiucOFjPHTPRA8OM40NKfHqAM79rzeffi4YZnN5TWHumSkZ+G7P62Rl+xv3/6FmF6Hnux4ZFS3zGz0S9kMqdWEUrbG/XAqrU0ma/e4065JY3YNq6uVvif3n3Dy4hLQgnJIiFPfqTBXVJiZsLPCr2EuMLLMYBgvpvlTiFCdAgFUGOmMCjMxMIhyT2sKY2ttsFkUPmugzbeljB8/cto9Y4HE7B7VXgFlAKAC6ZQTRgYzW4hai4bZT4cJTJ70B4NR7B4LQAxKp9o9+wnMTOmgCjMRO4AMvBmMq92TQvi/j3QTWAhX7wSkxJivPAgOIiaNV5BOqc637/Uil4AOJq8ges8Um2EONsWa0k3ZphGmKaYSU5lpr+kt0wcmT+IaBpkoTEis3dcUwvReiIm+AF/K+zQS1lbD1AavtvRDczBLGepcm9r8CAv6Aqf3TjUjCTpLkYnxEVSi0fwbDceQK2fh/uJRk/CX3/+IL0GfSwO3xon6/hn4dp/vLL0jew7Y1uVsH9x8wfaw9eMWbtwq6SfgG/86ewcfhwHVP0BzepyUvztlS9E82aeVvsqY1X560b3U6n1LO2RUPDvnTbpOrL6QyZ9+ivwZyuSPWSeq66TU/TH+6u/kwT0Kf7WWFSgV5rIKMxMOVORhpAuMLDEYxoNDmTyMeGAu2aLCHB/O8Il8EJ/TKszEeCYP21AYWxuDLZxxhEDwfFVMFA+ynI8nSOXPaFOsVLGaNeOowQRAT5aiXs9U2vvvxgd1w6k1S/7ExHq9cBsvpqly9PiXH1y8d/simY/gNZPUHh7m7Cq+1oQZWa52lcDbVa14u4pdqXaVkTCMakpRHlKNLOtD7Koc6H41fnTME+vGDx+F//6lw7CoJ9aNHT2+rmUrGUb4x7cqWQDrA/1lfNm3fUBJCYqshfFGnw1f9LhWZrqNP/FutuFs9z+29FnUBqIhnl4nd3ad2RY67G5uJ/Yoa8FquthaDHHyxm5FFphkN7ZiKswpFWYmHACYNPB3hfmDwTDeGIIYhI5BaOc6qMJMjGOSgMHY/Gk9gfJbrN6HzZfrnM9fmS9QNjXaUitJLDDtv+tj+U/ViTbdx5Km1InWdVozvOkyUd07jje6dOfrRNXnY3TIVehwl9EhUEeejgZ0zYz/IZXBrBaEr6XWN11LXUpLxB
U5WthwXdeDnYMVTmxOEgvlDxhRQ6KPbjD35jxE+wgj9SppROAseUfz8768ojfzRcP+XEUJX0Nssaj9zdSxUE/ckNRiVpqq0/WoX5y7OAvXEx8oEwrd1mYLs+lJHPRUjnsF1sKO8YUd9x6o8PCEPaEH7ADdYS+9eyUurMRWX6LykmS3Tyrxp1WfAra3CU0QsZdCQQdiMc3WnJb1yMYQ/ribBGCk+iCBGEoJZQkoj3tmwB8aF1FNlUqM5k7HatW4UVpgmjZoIBeSVG0aadjiM5mZJxb9iv8mEmHxycyMD6fxLTL3xs0vLSkpWVyyQLjT2C0zetjwUTCuzkSkQuHw4YXaphkUuff4CVJ7ffLkTjhG7Z/ZSfLsKcS3dAOhLMuO+Cz7QW9dsC5WJ+Qpx3GSbIOORGytQkpl2dqPoFuZWO+/alXgHwoflooDUIR0geXNOrL8lKCWDKcL2c7yXe/7kWAiAhovms6OUeKVzhs6eM6cwUPnTU6OjkpKiopOlvwGFBcPGFhUNDC6c1JMTDKEyUpPgfi10E/6GxhBAmAlU9qZ3KtpqMtLe8ugXngprh1kk6s1XQwHod/sYd1fsEYmLJk1LOlAXESSVD1i+dDMmLD8VUMz2jM59xIqEn8WOhJL8KvzIMeaweJIqEhy3rOBsWMzKH5dhL/hcCLDJGDQ1GL6siZQo1UwhXV5blbKRfEALMQ73iPw3YQ7MF8Lz/Yqg4fKCaf59AvSIPwczK0CgM2B78Lh0Is/C5WIi+E7F6Zc9MVXoTv0IPhRXNDz5LcjwEkmc0/CJwEARpceDp3q7xJc0FsM/hSDPwX7MXjed/RQbbsuDWa0HYYCiXCDO8WEfRbO0JbYCAc8NzXla9iNjk/iT2HkT+fIGHsBKP4pbEBdhTvAi3CmXfAQol0j+c/MLhw7Z/bYwjmCJX/O7BG9R86YOYLmJ8FWZBUOApl8L4Bsa39ahRoG46EVpvz9Er4CQ15CEXgaXG6Ey+k8Awh8CxVeovBGaIJhRuEeDMFXXvr7b+EgnmvEc2EZXEfgY0CRME2KBAJ9KhDLjqJLjITmV+lhzUXsEGb2/OmogzCIyGQP0Ayk8/H8+31HdllydzbjeAoaycJYVSmq9XIelUkrnSKhVfCJFNCXpaVV2CrCMyer5NvC7G0221Q0w3EAPonw2/SZehK/4AqZOxqUgvsh/wfKsaIjSTlWbDQ7EI2zs/T8YQOAnupMYMhR53bvSHqcDhlskbyrZ6omd+jR5y1cjWeLSa1CZ3KQGGTsLw5om+os9J+wC8ftWPbY1DjfpHlpN/F3G8h/MOxmyvQs34RpSUu3wzM4Dp6BJ9HUV318jnkbYIuPUOWiSv1x2NrgfcJgPFDcrHKRwj97UJHwvdDx4Wf9Ct/T/DYqqlLWyx8A0cz6CFuAyY/qJNS2HjWpPfzJhf9/oseQqvkjL7xw9ewTa3PD02Y/XjT2q6/QuLo60muYW/llcMuTphYFBbmk17DRDugNgBAuWAjPGUA3Dc81d00lIHeRsh2KLYfajLzBeVarnnGeN8950Gz1idShA8XFH+DRHvDFD/EY4bysh6Hr16+fjoKwLEET8mW0H9XwJ7outANRYIsmz95cSznFHnsw726PCmymSZE7s+FqplxJkudpE+aPzpTbHw+GeeStNg3/n82ew3OPzp4zmQTQV4QegaCPpmai+QNnHf+vqyMs/4fqiIfURgwGAG4hOEogRiPTmzd1zjOZnmuXVFO4LIGr5mQsak5mJpzXmKNT8jb/Bbts07oAAAB4AWNgZGAAYen931bF89t8ZZDkYACBIx8E9UD0OZEzun+E/l7lLOKoBHI5GZhAogBOMQvyeAFjYGRg4Ej6e5WBgdPoj9B/I44FQBFUcAcAiWcGPQB4AW2RUxidTQwG52Szv22ztm3btm3btm3btm3bvqvd03y1LuaZrPGGngCA+RkSkWEyhHR6jhTag4r+DBX8n6QKFSOdLKaNrOBb15rftSEZQrtIJG
PILCkY6jIjNr+KMd/IZ+QxkhjtjAZGRqNsMCYRGSr/UFW/JbX2oq9Go427QIyP/yWbj8I3/h9G+5+o5tMxWscbE6xdmVp+DqMlJzO1Bclt3mgtwOiPxcbmGI2o7KObO5lzmD+huI7lb9+ATv4Hvv74B6KY4+kdvtQ1FJG4dHCF+dH8hatOQjcCJwPszsXs7l1oo/HJa86vKSgqu4lmdQGjpXxPH/k1PEfj0DaoP7ptc7vQKphrtAksG81RySdb+NnazfUr/vEPiGj+1/jGKCizSSLCLPPvPi8Nn/39X/TWlnbvheT1IympZ/gt9Igueo8S+hcTPspAYdeXBu4c5bQmrYO/f9Z3nM7uM1prdkq7stRw5Sknc2miy+mn35BK0jFGvqGmJLS5k2ls66t99AVzPqpkHKWehigT/PuH+Lhj+E6QRZDDSyRneH+Qg/moscqXIcLLDN5FM5DTN7facniTZzlsY4Bepkvw5x/io7UkeJaDZfAm8lt4kfxGb/MKY6wuI8UbGbxNX9JrV7Pl8BZBDoPpFjjY6+MFVPw4OfndJYbLPNq5I7TxnZn8UVtmhEaSzsgYWK4ZN8gox83b6SL1qCFVKeBGENNNJbXmJLu2Z5RO4RfXnZyuEuVcQZsTn8LB3z0FW2/CPAAAAAAAAAAAAAAALABaANQBSgHaAo4CqgLUAv4DLgNUA2gDgAOaA7IEAgQuBIQFAgVKBbAGGgZQBsgHMAdAB1AHgAeuB94IOgjuCTgJpgn8Cj4KhgrCCygLggueC9QMHgxCDKYM9A1GDYwN6A5MDrIO3g8aD1IPuhAGEEQQfhCkELwQ4BECER4RWBHiEkASkBLuE1IToBQUFFoUhhTKFRIVLhWaFeAWMhaQFuwXLBewGAAYRBh+GOIZPBmSGcwaEBooGmwashqyGtobRBuqHA4ccByaHT4dYB30Ho4emh60HrwfZh98H8ggCiBoIQYhQCGQIboh0CIGIjwihiKSIqwixiLgIzgjSiNcI24jgCOWI6wkIiQuJEAkUiRoJHokjCSeJLQlIiU0JUYlWCVqJXwlkiXEJkImVCZmJngmjiagJu4nVCdmJ3gniiecJ7AnxiiOKJoorCi+KNAo5Cj2KQgpGikwKcop3CnuKgAqEiokKjgqcCrqKvwrDisgKzQrRiukK7gr1CxeLPItGC1YLZQtni2oLcAt2i3uLgYuHi4+Llouci6KLp4u3C9eL3Yv2DAcMKQw9jEcMS4AAAABAAAA3ACXABYAXwAFAAEAAAAAAA4AAAIAAeYAAwABeAF9zANyI2AYBuBnt+YBMsqwjkfpsLY9qmL7Bj1Hb1pbP7+X6HOmy7/uAf8EeJn/GxV4mbvEjL/M3R88Pabfsr0Cbl7mUQdu7am4VNFUEbQp5VpOS8melIyWogt1yyoqMopSkn+kkmIiouKOpNQ15FSUBUWFREWe1ISoWcE378e+mU99WU1NVUlhYZ2nHXKh6sKVrJSQirqMsKKcKyllDSkNYRtWzVu0Zd+iGTEhkXtU0y0IeAFswQOWQgEAAMDZv7Zt27ZtZddTZ+4udYFmBEC5qKCaEjWBQK069Ro0atKsRas27Tp06tKtR68+/QYMGjJsxKgx4yZMmjJtxqw58xYsWrJsxao16zZs2rJtx649+w4cOnLsxKkz5y5cunLtxq079x48evLsxas37z58+vLtx68//0LCIqJi4hKSUtIyshWC4GErEAAAAOAs/3NtI+tluy7Ztm3zZZ6z69yMBuVixBqU50icNMkK1ap48kySXdGy3biVKl+CcYeuFalz786DMo1mTWvy2hsZ3po3Y86yBYuWHHtvzYpVzT64kmnTug0fnTqX6LNPvvjmq+9K/PDLT7/98c9f/wU4EShYkBBhQvUoFSFcpChnLvTZ0qLVtgM72rTr0m1Ch06T4g0ZNvDk+ZMXLo08efk4RnZGDkZOhlQWv1AfH/bSvEwDA0cXEG1kYG7C4lpalM+Rll9apFdcWsBZkl
GUmgpisZeU54Pp/DwwHwBPQXTqAHgBLc4lXMVQFIDxe5+/Ke4uCXd3KLhLWsWdhvWynugFl7ieRu+dnsb5flD+V44+W03Pqkm96nSsSX3pwfbG8hyVafqKLY53NhRyi8/1/P8l1md6//6SRzsznWXcUiuTXQ3F3NJTfU3V3NRrJp2WrjUzN3sl06/thr54PYV7+IYaQ1++jlly8+AO2iz5W4IT8OEJIqi29NXrGHhwB65DLfxAtSN5HvgQQgRjjiSfQJDDoBz5e4AA3BwJtOVAHgtBBGGeRNsK5DYGd8IvM61XFAA=) format('woff'), +} + +@font-face { + font-family: 'Roboto'; + font-style: normal; + font-weight: 200; + src: + local('Roboto Light'), + url(data:application/x-font-woff;charset=utf-8;base64,d09GRgABAAAAAEScABMAAAAAdFQAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAABGRlRNAAABqAAAABwAAAAcXzC5yUdERUYAAAHEAAAAHgAAACAAzgAER1BPUwAAAeQAAAVxAAANIkezYOlHU1VCAAAHWAAAACwAAAAwuP+4/k9TLzIAAAeEAAAAVgAAAGC3ouDrY21hcAAAB9wAAAG+AAACioYHy/VjdnQgAAAJnAAAADQAAAA0CnAOGGZwZ20AAAnQAAABsQAAAmVTtC+nZ2FzcAAAC4QAAAAIAAAACAAAABBnbHlmAAALjAAAMaIAAFTUMXgLR2hlYWQAAD0wAAAAMQAAADYBsFYkaGhlYQAAPWQAAAAfAAAAJA7cBhlobXR4AAA9hAAAAeEAAAKEbjk+b2xvY2EAAD9oAAABNgAAAUQwY0cibWF4cAAAQKAAAAAgAAAAIAG+AZluYW1lAABAwAAAAZAAAANoT6qDDHBvc3QAAEJQAAABjAAAAktoPRGfcHJlcAAAQ9wAAAC2AAABI0qzIoZ3ZWJmAABElAAAAAYAAAAGVU1R3QAAAAEAAAAAzD2izwAAAADE8BEuAAAAAM4DBct42mNgZGBg4ANiCQYQYGJgBMIFQMwC5jEAAAsqANMAAHjapZZ5bNRFFMff79dtd7u03UNsORWwKYhWGwFLsRBiGuSKkdIDsBg0kRCVGq6GcpSEFINKghzlMDFBVBITNRpDJEGCBlBBRSEQIQYJyLHd/pA78a99fn6zy3ZbykJxXr7zm3nz5s2b7xy/EUtE/FIiY8SuGDe5SvLeeHlhvfQRD3pRFbc9tWy9/ur8evG5JQOP2Hxt8ds7xLJrjO1AmYxUyiyZLQtlpayRmOWx/FbQGmSVWM9aVdZs6z1rk/WZFbU9dtgutIeCsVivND1dsWSG9JAMKZOeMkrCUi756MI6AN0g3Se1ellm6GlqOXpBxuoNmYXGlgn6D/qo9JOA5ksIFOoBKY79K6V4qtC/ZJy2yXNgPJgIKkEVqMbPNHpO14jUgXr6LcK+gbbFoBEsoX0pWE55Bd8W/G8BW9WNboZ+b/KPyWslDy5K9biU6TkZpY6U6ymiLdUv0Vyi9jvt1boT+x9lTmyXzNUhaHKIcqyEaDkLfw8YTQBNDpo2NHmsVjZtrl2u/kZLmDlHaT0BJ1HTZ45+gbdfTSznJVOK4WQkWAAWgiYQQB/EVzAxYhheIvASgZcIvETgJGK8NfDdgN1GsAlsBllYO1g7WDtYO1g7WDrMcAK+a2UA6xci+kp0i0EjWA4s2nMZO6DNrE4zDDbDYDMMNptIHSJ1iNQhUodI3R4DafGzG8JSKEUyRB6VJ+RJGSbDZQSrWsb+KJfR7OAJ8rxUM/Z0xq6Tl6Re3iTyjUS9WezsQ+7e9L7j24G//uznFl2th/WAOrqPNelG0hq5z6Srk6Ub4Kau0Mv6qe7W7ZQPsxIhPcgeX3sPns6DCDjYSX/9rj3/7ka8bbeNGQXHE/
UzyZb3Naqtt/W+FAepZ1J3mVOWPoW7ipYzFE8hSiE3Erfcabyo/I+kF7TVzPBMiq6VU3Wr/FGy9F2y1MD5aLfeG7ukh3SKztOQHtOldxmvgTW/3uWKBeLrqifdSuxbPeNypiOTPb/StfqBbgBrYCOIKkifoH6ou3S//oxFky4jLzLWvTSoV/RrU96pR/UY36Mdx9VzerNDbA+b/M8UzXE97TKTYCcvdY079Fxl8v2duY3vJb3Y3lvbjK+QWdMjScujKb226ze6V0+AH9gHId3G3ghxPk5yZs+m2BVzo4j+otuYZ3wX5ibGa4uP3R5tYufcaU32pGm7er+ninU2ffVaVz47Mt+tHXstTVvae0Cv3PeYTjqG4n5v927ukWDyTnDucuZXdXEerpqzcsc10D9M3nKnmNPFnZ6n7nOlY/RxrdBhYDA7yovKyx/Mq5N0vr6l67EIaA4ne4k5369QP6Kvpd4r8RRjZ+hP4PPkPrp4i832qOJ/AP1E1+ke7uE9nPDWJJ+Jrx4Cu92zEZtr6m93h6H2O7CDtjENA6eSpZOdzwL/84C8m3g93kuyeVN44C/L1LyIT7J5D3gNqz0SVjloc7lZuAc7/RfC3NHu/+dBU8tP6vORAnN/90poeoM+5H3vIaYsM3omo/oYwfVdgLgpk6+vWxvGSuQWfkuMV4v5+Q1TAaIMIr2ZVYhyIWLzCipijKGIT4qRPvIU4uNFNJz8aaQvL6NSeBqJ+HkjlcHUKCRHnkEKeDGVw9dopJdUIBkyTsbD80TEIy/IFKKoRLJkKpIpVYhHahCvTEPyeGVNJ7oXkX68tuooz0SCvLrqiXCezCeSBbz//bIIyZAGxCOLpRGfS2QpHpYhPlmOZEkT4pcVSJ6sk/XM1325WdKC5JsXnCVbZCtlG75djiSFI9uwkwE37hv6Md6G2cx+NJYVzKs3MxtPlJOQ/sxtqjzEO7FaBpk5PMIMZtKznvgGm/hKiKsJPjcw3oj/AIgWgIQAAAB42mNgZGBg4GLQYdBjYHJx8wlh4MtJLMljkGBgAYoz/P8PJBAsIAAAnsoHa3jaY2BmvsGow8DKwMI6i9WYgYFRHkIzX2RIY2JgYABhCHjAwPQ/gEEhGshUAPHd8/PTgRTvAwa2tH9pDAwcSUzBCgyM8/0ZGRhYrFg3gNUxAQCExA4aAAB42mNgYGBmgGAZBkYgycDYAuQxgvksjBlAOozBgYGVQQzI4mWoY1jAsJhhKcNKhtUM6xi2MOxg2M1wkOEkw1mGywzXGG4x3GF4yPCS4S3DZ4ZvDL8Y/jAGMhYyHWO6xXRHgUtBREFKQU5BTUFfwUohXmGNotIDhv//QTYCzVUAmrsIaO4KoLlriTA3gLEAai6DgoCChIIM2FxLJHMZ/3/9//j/of8H/x/4v+//3v97/m//v+X/pv9r/y/7v/j/vP9z/s/8P+P/lP+9/7v+t/5v/t/wv/6/zn++v7v+Lv+77EHzg7oH1Q+qHhQ/yH6Q9MDu/qf7tQoLIOFDC8DIxgA3nJEJSDChKwBGEQsrGzsHJxc3Dy8fv4CgkLCIqJi4hKSUtIysnLyCopKyiqqauoamlraOrp6+gaGRsYmpmbmFpZW1ja2dvYOjk7OLq5u7h6eXt4+vn39AYFBwSGhYeERkVHRMbFx8QiLIlnyGopJSiIVlQFwOYlQwMFQyVDEwVDMwJKeABLLS52enQZ2ViumVjNyZSWDGxEnTpk+eAmbOmz0HRE2dASTyGBgKgFQhEBcDcUMTkGjMARIAqVuf0QAAAAAEOgWvAGYAqABiAGUAZwBoAGkAagBrAHUApABcAHgAZQBsAHIAeAB8AHAAegBaAEQFEXjaXVG7TltBEN0NDwOBxNggOdoUs5mQxnuhBQnE1Y1iZDuF5QhpN3KRi3EBH0CBRA3arxmgoaRImwYhF0h8Qj4hEjNriKI0Ozuzc86ZM0vKkap36WvPU+ckkMLdBs02/U5ItbMA96Tr642MtIMHWmxm9M
p1+/4LBpvRlDtqAOU9bykPGU07gVq0p/7R/AqG+/wf8zsYtDTT9NQ6CekhBOabcUuD7xnNussP+oLV4WIwMKSYpuIuP6ZS/rc052rLsLWR0byDMxH5yTRAU2ttBJr+1CHV83EUS5DLprE2mJiy/iQTwYXJdFVTtcz42sFdsrPoYIMqzYEH2MNWeQweDg8mFNK3JMosDRH2YqvECBGTHAo55dzJ/qRA+UgSxrxJSjvjhrUGxpHXwKA2T7P/PJtNbW8dwvhZHMF3vxlLOvjIhtoYEWI7YimACURCRlX5hhrPvSwG5FL7z0CUgOXxj3+dCLTu2EQ8l7V1DjFWCHp+29zyy4q7VrnOi0J3b6pqqNIpzftezr7HA54eC8NBY8Gbz/v+SoH6PCyuNGgOBEN6N3r/orXqiKu8Fz6yJ9O/sVoAAAAAAQAB//8AD3jarXwHfBRl+v/7TtuWLbMlm54smwIJJLBLCKGJCOqJgIp6NBEiiUgNiCb0IgiIFU9FkKCABKXNbAIqcoAUC3Y9I6ioh5yaE8RT9CeQHf7P885sCgS4/+/zE7OZzO7O+z79+5QZwpG+hHBjxNsIT0wkX6WkoEfEJCScDKmS+FWPCM/BIVF5PC3i6YhJSmzoEaF4PiwH5KyAHOjLZWiZdIU2Vrzt7Ka+wvsELkmqCKHtRYVdt4BE4FyeSoX6iMiRPKqYCxShTiEh1eSsV7iQaqF5RBWp7FaE4o6dwoVhHy+H5apHH6iorqZf85805OM15wrd6edSAhGJjfSCa1KSp0jhWk4gFiFPMYeoEleg0DpVcNXXii6SBCcFl2qieaoVztjYGdUOS3XslExxjbAHX+fyZYFqoTQgdCfnvz6snaPcl/AK611DiLAGaEgm6fRmEkkCGiK++MRwOBwxARkRsy0OjmsJTTLZ82o4OSU10x9WiaO+xutPSM70h2pFgb3Fu9LS8S1RrK+RLFY7vEWVjAIlqU5NdNUrifomza76iMlszavpbRIsQI9LjYezPjjri8ezPg+c9blUG5yNc9WrAZqndEna2etfp3OJL8+6s9e3p514oCS5argkkwfWZa8SvsIiNZZEMxzEu2qs8TYPXqrG7ouDD7jYq8xevfiKn/Gzz8C3Eti34JrJseukxK6Tip+pSYt9Mh3P871dHI9EumTkQkpqWnr+Bf8pvZNABJ7CgCcAP2Eef8K+IB/wBfigB3+K4K1rqGuwVk/bDRoziHaDl3/9z2ByXjs1YMwA7S14uY92G6y9SVfeQV8bRZ/X2M8o7bo7tDK6En/gPKggqTzfkY9Kj5AO5CkSyQMJKm1BDub6SJ6IPM3LteRFZBCm4g2rKZb6iJyCp2W3BbQ0v0Bx1KnpoKIko05WOXe9ku5SZWB7bkj1guDahhSvSzXDicSQmuWsV/3uerUAxCOngyrHFSteucYmprTJ9BcrZrcSLCZqiii7txPq8CdkwVngQlHYGx8OdSnsnJ2TTws7dykClUyjThrsnB1sI/m88f406vNKJl+wMJ9W8uWHHvvblsd3fPT225vLtu3l+PLnH//bs0ve+PCtj5TS7afoc5L63KqKSQ9f3WfnS2vfcxw65Pr+gLhi96r7py7r3e+V6g1vOXb/3fYxWNCk8z+JC8WDxI7aDdzpTh7S+aN2ctRHBOCImuCor+2amSfY89SucCjb2KHsqKdKjwKF1KkOYIHDpXp13UWFzYDDfDjMd6md4bAtaGlP+O11yO4am5ACRlCsds6HP1Iz89LgD6J27SS71ZT04mI1QYaj1LRiZArwIRyKT6VeKdgmu4gxqCfVGeKhfpp1mfcnrZ43d/Vzc+ZXjbprxNDRJcOG3VXLvXVDtJjOgTeqVsMbo0v0N0qE/gPmbt06d8CcLVvmDJk1a8iAIXPmDGmQhakdzz26euCcrVvnDIy9NXD4jJnDCHiz4ed/El4DvrUhHUlPUkEiKegVMpBx2VJ9xIqM684Di3oxFgVBeYK6eXeCw04utSsc2kGT7C
7VB4fxcr16FfxGPmy3ChnZHWRkks8OTHInprZjTOqeLbt3EJM9MbVDZ11rOne5ijJ1ATaAdjgp7QUeDdTEbwrmOGgjV4rgUzkmB/WAHhXBRxiPhj+x1HnzwMiqx18adtsa+lynLpP+0u81bumM2w7d9/Hpyk1rR2y7VisRTVzBtEEPXXW12q3TPSPLJtN7K98YYxvz4l+rNq+dOWzB1TO09OuUMfM+/+th8ZGBt9ZFZlVffw09JpqEzJEruEN9Hr1pYYeSroPGLgAbnCb0IceY387WvbbhsqkiXeCvkVGN3nmauSxb6EOt7+3XThK05Ye1TtxEaSiRiYdQxc0YbAWr87AveQpdpCidSpzsc7mBDdnkYRq/SUp64vDhJ5KkLdoJrqeTjud6l9C/3B39Vdvu1bZHfx1/7RiuM17brXWivza/Nl+n2puu3cUtF7q4nKJwPIHLE1PQ/fiRow8nSS/TeO3EZkmrKOPc9EYv/QvnK7u2JLpXe8qpPRx9bwzbdyo3m78B4oiD3EMgpIKzoQVUcbL9cyB7EczExZy5kp1EIQjnv0NUQvPfQfd+ovP+TPTqDoW4FMdeQaEuhdvLqZwjP58qDnSmVBU58Dc20BQeY6jE/IrIh/ksv+gx2WiOJzWD3iiMNdO+Aa3mm9vq3rvtiHBr6Uw6VVs2t/Re7YuraCft4560PWH77U+WC52EHRBlbyEKKVBMYZXa6hUxBMJD70is4DQpwUPKo6OEsGutY3EcdFwIRSxWfM9igo9ZLXhoJZZY5AW3D6EdXL0clPvTyHT6utZvOjetnH6i5ZdrafSYvofBmkadZBfoTBbuATXG2kxjQDJoUwKSKxY3qszgfhXj4Iv+6pe1E/p1OnHdOBe3Biy3DV5HpVI9/lBFKAAW59XyXtREwB7G3nyd6Ddct9JS/G41vHQk6+G77WIIxl7feICXQAny3nr2o18CsUv10vXr8ftp5x/g/s0wkEwAMiHwgVX1z/lpmKZxoyZEX5gtdTjzKcNMi8G3BA2f3I1EbLiQLMW8MTqVFN3vOpv8LjAi1fCwqk0oRlZ4ZJc7HHInUhcXbMN59PAi695x8ekjR/44feTw/1SqGzZsU6qrt3KFtB9NpCHtA+0H7XXte+0j2omavv799Dd0/Lf/+c+3QMeu82e4DWItyKI7iQjo7zjcEeVcGXsLEO8wsQjACidslkeBC9SiGzNoMxMRMjcLRL6L/rtSNN865Gw/sRvyaDJgLBloToKjiAMptgHFaCRqPF8fiWdXi09CLUvWAZPMABPYpSrBcpIHPyDZQdU8Eh56HLByCrzrSZTdEd5mLQamqDbgj+IsVuLliEQ8xSzIZBvO00T9oI6FNOYefcHJ4h+f7Dr2zGJtMsf93FBJjy6c+OzDGzZPFjw7Gg7vqPyfFVo3sXQEl/rUOyOWrH91JdIx9vxP/GmgIxe0JtIW6RCBDrEtbkkEZkRSkCQvkORlCMObYMmrtce1TYGQakfR5unuACID51L8iDcS4DihADEFnEKUgRBDyXIp6fiuDMdyAaKTiJzOMEscEN4ewYcfYgegjrYsdsQB4FBJVnGxYpeVNgBJ3GpienFL5JEHxsMOGPU5jYxhyCPYJnMsV/7Gs6u27nhp2bI161eueLimnBP/3L3/h3nTliw+d3CP9jNdJC1TXnj62SfL1sxesvbFxdLLx+p23729fc5rc/Z9fQR1ux/IuT/YgpU4yRASscS0qJbYLJwdgDoAZ6lekQAYuwoUS50SF0LlVvhQxMxciFkCJloYPLagN5FRuWyoXLRY4WTFwVSMhmVAkqBnkJjkmPpxax44frwi+h2XKoVpeV++oSGrVHuclpfyvbiJzD9sBZszw77SyX4SSW2UW2qj3FwoN4+tvsaR6jLn1fptqS4Qmd9WzxC8s64myUkceSoHcRxFlOSMAXPmyx1O9OVOh+7Lr9p8ZjH6clFxuhTXXjBixbN351UP/tkVztpqvA6PJy8CrxkPZTwUlE
Bli4nizacRl8erw2aqmtHTpxYrSaABbtRsB8g3QsxJxRfIFERpyvEgpO5Fi7q4fV5wBtlbufHVy9a+8MITDz8ZGH0ztz+6rkvRwik7jx/9uvYXOl168rkDO9cdHDrMxadOjp4JdeH58+TwUe3PdwjzTyuAV+nMVnPIXSSSgNxKi/knG19f685MQIjoFoE5bZk+J6OrCinJLmSK6gPmtIPfgWTQUMHkTmAampkGGupzAgS0uYE4c7EiyIoJqZE7E9BEvykfAI2UCgYKbo0RQoqak7mCpn3cf3lxenH5wLWf9dg55cDx3w+8o52r3Pv08m0vV03fHuBS6OQG2qtNRklGWsP78weO1H498rn2I23f8PGv/3pxW92cu5guDAAdRV2II51JxIwaik5bJWie9gLFXIfpaixFg8CnOlAHiRk2zRfr0cNKeVOwyE08A/jXT5zNtVXacqn5C/GGsjLtx+gebemMGXQq91dqIoglxwA/7cBPPwlCjnw/ifiQo8nAUQuu2wE4mhPwWYCjObiFjoyjCcBRCR1AJhwkuNQ04KcbDnPxXBwwuBOcyM0ENGnhfckBJ2MxMlx1E3ACObLq5OF3B7caJxXrULKoGZJkNi+AzTfnsKfZ8ZiqRfcuPvn3Xf956N5FL2hnP/hEi1bse27FgbefXnGg3ZYli7aqCxdvpgvm72nXVrl/10cfv36/2rbdnnkHPv3kwGNr1z360JYtXMH8Vavmz6l+HnVqKPjNfxk6BejIGot5LAJkAQcS0qw8cCBBatIpbz0qFIQ/JRBSTV5dp5LRFdhZymV18LpmyVb9XAK6BzUL9Yz4dKIJi5BeAkaRU5RGWQKBuJkzcLNO7FByftenmnb6i4Grr4vvu2jwhgOFNZPe+m3W5uULtmVtX/XIK/zuozRXO6md1QZHtfq09DEZKV9/uHzEGOr9cuOxRSUrP/zytG47GCSCQldWD+nQhCYYIEAsYUbSADshlAAvyBCFpRFR8PCzculSwBX83xBbcARhTo7QDWKyhXQiEROgalXCC1ljAEkxh7D8IeH1CljR4AK0ZMOXcYCY0pbGMJOwAq+u28IMfgn/EVydgFf1UZPPT30D+O7RlRMmcGX099F0xhztlxQpRTs9B/fzFN3Af85vYvQl6UjLqlNnZdQZxKCNUPh5iu/TsJvvQzeMG0dXjRunrzkL1nxHX7OokBYV5lBYeRZXOWFCdAk/YMYs6k4GL+CcqT04mvH0ZjCi65nupJFJJJKMPE2xx9CDrSV6SNfRg5uhB4CiSnIIzaU2zUu6C3lKXCOkYElsXBLoCh8PhuKRVYsLHW18CjpaKe4C8OCgviB42Bh4MAWRqzfzdRtq3l00o1dyBc29Y8JdS+bcD1GHtlkmlLy4+9DmxR9PLRwx6oG7byt/Ztq8h5fed279ypVAzwytu/S5+DAJk2vIFhJxYrXCElaLxHolLaR0KlBzHfXK1QWqD35lFqg8Aq++zCRyIOfO0X2sBMlEP70ydNW+s1P11KGnS+m1FzzLGSVpL6lJSu7ZC+swtPGIhZYcsCCVtgWaA3Jvi4WXM3PzOxV2w+KF5FZNbZAJzlz4TId88NVXFwE7EhINdrhJIIPwEsYYI/3s4mauO8xLzJ70D3AkAMd++EQGofobPWiRh/n3GW76Ga2gi+lS2Vr3wcB75MLnyh5Y4vGf2Dhyaj+OD1lvKnr0RZtbU7Sntb9rI2QPnUhvHlLbK733B3dqC7VRXLHr1lG3P9KZFmQM7PigQr+mGzlJS9WGHNb2lQ0fNfqXgxoNFxZx0X0LR515iy6i27R22jxtkdahfbB/u470Nzp11au3T4UMlsvwJ/0M8oCsXvgG4oEJMqH2us0qfJgFhVrJTCi4JQlxQFwBy21UipHAigVMAPdBPsB7AkAo124KlzXr6Wjp07u5G7WvJVE5exN9WhvHUcg9WBzYA+ssZvmhH9Ycb3gHJ3hBFn8y0Av62XLMCwaYyJ3o/kMAJJje2pz1Na
LNYwYDgPMpYHagyG0o/slCKlH9TpYioi+ECJuhY3JIxJojvayA7uUDhbGDPfSl76JzJy7aEP2HNo/Oe+HV6jXaRDqoasurivaBqOzZW74hI+HQwv2flK557IGNpcsWP7RMt+WFENs2g22mkrGGZXqAHk8yg+jxgKsYaIgDPBwn4Lk4CxppGiPNBSS4WPVTsYQYDDaF1HQslrhA+4TkYqRClRJRIeM8cMqUoFeNXODVBUj9UZ+4VOp1o4KF/RLEM7KQ5v72I3V5uPKEd17d88MPe1495C/nPNrP3/+m1XGjT9J4OvqPb6Tte7XDP5z6t3Zk1+vSl+fonehnUD7vg3wsxEM6GtKxxqTjwdDsjdUiFKsLUQHzIz7dfcug+FgzCAB3SU/amSBXq6mNjtDWa79DutXxMPVrP36ufSQq2nNa/evaj1pVKc3/Yfdxms94iesPhfVt5DpjdUtsdQF0Q9RVUeSZKuJGYmk4S9EtgFQUa0jPx40kXE/A9Z89/FMNx7i/R6/hg6JSFj1aFl1fShrXHcXo7q2ve/GaJj3itLamsaDtggX38C801HEHoj1wsbfujt6ur7Uc9OUD0JcMrKmlxfSlFSWpTUhMQ5DJ8uFAK/qCkNMUisQzVYuHNIvZga46aaA6yTKzhwRQHCW5WI2DNNFAmy3Uxyfr6iODMchMg5bTwj9+ohYfNzlp364Dp7T3n3g3S5tNz3XSogc17XVuCMjUQW/9aZe0fLt2/Gvtt+PaVzd3pLPKomevm0mHNfG0nsnyKsOjmHSPoojhWivPuGptkqSN9UcUm15lFljDpFGG2IAJQ64DTK3ge1RUNBwQleit3OazN3FV0RJ9PUi+6M2sBhFoJsPG2gVcDX/ExiseqUT/pH/3FsBmKnzXg3rnaMyNHI25kYVdCpTfHctcWQ5k05Vfz1UcwGsL5CiKu3l+AithZpmTXdj5Fq5843OLNlee3PV+xVS6TKpat32F4Dl38q2fxpXtNcd49jPzjzGeWZp4xtsZz3j0jM7G8ggXwooaUXm7nlFQPaNACsE5+y0U4nQQ2PYW13MxF93ALeIejT7/NrCvhKsSo8XRgMhtiQ421jbB2mIsAuBKBg+lGA8jPNN6XrTEKphMOL49lRwY9dntTfYkdYRryeQ241qmuHAjJbGKJkvsdUaa9AKkKhPGSMUs13BinB0jskmv92F1JcLbHCwKM9ooaoQnhwapySPvWc35JS6xqsIqRb8bHD0u2WA7msiBhjzAzebOakIDjS6Jzm7SzVNMN6+9SDebKyRoo2Dszo7ixt1xLGszG1tSeUtsQ0WootQk76nku0ugowchAJ5Lo8I/z94kHKfnUsG/zgLb//7Cupc5VveyXLHuJdj0uhf4/5ivzSAeNF83+Fssgvlm0Y6UUIF20d7VGs4T7cPK+o8+O3nqHx/9iK4/kY7U1mo/nNS+19bTETTpZ+1bmn7q1AmaoX17QsfvyJu/sfqFh/Rp7g3B/9dabEwHLS1DgS2E0cCJBV4jGqgem9wy8AYDibQp1v7+r3Pn/qUtoHNqt9du1xaISv3efT9G13H7X1n28Gv6Pmadby86gFcesOebSURGXvljvEpDXrVhG/DCBrwuNcngVRBLE17Muh2yjbWjZEiMABXIumalyaBOzVjo5Ux+UxbDaZdg5MTSs4O1P7s/cP0lubleOzP4RP8zqakXs5Qju4CfH4nbALsHSamhbS5d29QgsDQxmbE0EVmayShKAoqSQ0qSnvmlM/SuiCE1C9UgSTfzOFmRgapEomMd5uqV4EVYB6BBvN8Hfp41jZqJYBc9+e+zD85YXJGRNSMrbcsqbSy9++CO7a9oD4nb3j847ZXcNtsWLu07oU1C5oJrFz24KjqJ+3PN4sdXge1gLl8JculAyluv/2GTUU2BUJYi47mUhJYdxvbNOoytNBTN7bGmZ5ODLK/FJmKNw5fVvtUWYmY45AdCfaaWLUQhKKG7HcNN0jZv+Sxy9NQf1HP4nw
89yE/6UN12cMc3P/2ufXf0i7VVdIX08voVsyue6dZj77rqT2ZP3yqK0vJdz02b9GTXHu9Vb/2AThp3SEJ/0QFk+BjDx2C1UvN6icKHWEor1aHuR0RWmRUBFEQk1naVsILXlBFiL6CDUKLZKrFScnaHeAPzR9Ws14b+skjPhlTJ8L2KtdFd8lgkdOHFWPUD3SWkLljsZaVwiDONAQfLGtWVX6m1xyq0o//+QTtGP+O/bMja+e6h1/H3zw1R3Q8i7v+Q4Z6AUakkHBs1QKzDAI1KLLGiT5j6w0WI9zMW0B2pkJ9uXxD95xTwcdeOHi3shFBKSTH4fewD+EitXuNRnGF2yQjFAACXjWekUEjVqUuNww4hyl7P4t7485erWVufuBTfXofe/9m5r+rkcaOUmO9Q5L2q2XdGVEzwxuyfb8FqIsSQGpfs9ORF4LVZQbGGM7tklv3t4Exmp0v2NXXlKaxthGziQ8fKvDiQmE6RRP9VFAmlOUETDRbPpJb2UhHtPIV2LpQKqGmG9tAU7bVsKUvbMRXIP/EN/VbwnjvxT/wFvv6OZ589t07nb3fgr8LiTLZh+eYwKwYbcUbPpjiMI4KVxREL1f8PWmh3elpLfoI+S1c9oaXQ049pt2m3c8e4D6LLuUnRUDSNWxCdA2sEYI2dsIYZEbupUYY8LGApUEx1DKFbEambWPQCivUDpBfWooirltG9dP+y6MkKUWn4nG/XMCZ6gkvWaYDEQBjPdCQ/FstjeJXn65sUxaRXqAE0G425cCENYBEk4LuTH9bwBv9xwzp+9gjh57K/noszcMI67W16UpoHdlXIKimA7LGSQvlYnajW5CV2IQ9RDphX7C8+FDMpgB5BOexbR2/45BPtbdOrZWe8ZXDdjucf4MVYP4q07EeBkIMd7+NG3ScqZz6FzxLYQ3+2h15EMRXoRl2A2J/twVQHy9VK+sKSS6VghRTs3RXbjClW8fFB+AcEHfj0U9pf2/6JdKLsz+uxvsQd4RoY/xp7YwbLYC8sfQYt4wfQvGE0d9qBNCntDfjC59F29Pi4cVqKzid6fhU/lWXQSc2wGR40IywM7oXyUxoeK2XfuUPYSfeLB4hA2hC9AcELxIWdRZFxFnLyOAG0Qt9IUdgTvINbeeg+cY+o/YHx927AxG8LAyFq5ZMTemarJIUjAVw9xwoZLhbizBDA+PYBD+JSLNIUMPPGgm2mS7Ghp2cTAECvG09hDTcipOaGQiFI0zGtVzsatn/tb/2Z7SfnC0rqXlFNij8jKAl7d+799XcLs/IEV01iQpInT0l11aSkJoO5w59N5h6Bc8zqExJTUmM1n8SURnvPtLNBFTUNgEnEE8hhzTI+AJbnx1zJLEdszni9xNM5s3usQVYAJt+5iFXAwL36IZAWNp85KITP3E35r0499eDsFydxk6Ztr/nC7pwdZ+3x9uyqbRXTx89/s/1/1u2nGU/XPjht4ZzhVJKkqcNG7Xg5eqJ4QmHRTe1uK9+4dMjk6SOPLWOYZzXEAUlKAE1JJ6MN7GVHhvsA+EjI8BQ8YH01iWJczWAMd+uJgOyqV9wuNQHnwPTujOpG2OPSywh2JDkF3Z2LN0CrzDoNst4zyTF5jPowIiDJtLqyy8Zp+7/66o2KzYV2ue2a+1dXPb969rNZUkK0cvhd2jta1Peb9s2dQ9fRjJGTfzzg+5Dys0Yz3RsNuvMO051RRNeYeNDX+ECsSBkRkBYnYAQnS3edNqRFRz8eoMXjUhNBL+JCaqqM5V0GfRKxACIEWHEuHg7NqcYEjbslDEDMg4Ew7Pf6vCbIvbjRv34Zuf9ebvy2uVurNygVO8ZxlbPXH/0PZ849QTveU7ZOEqUFq878PXfvn0umS5L4aEkpLWDymAx0fGrI404dr+vhGeUhxOQhMHkI5pbyMARhsoGux6SR4EYSnKBvVhmU0ZBGnMko6rBCImYROc0L9LKepU/+8sCUDUUV46xdXr5335eVq6umrcpr9/T0qjX0vI
/ytGjUEG7BmR9X3z6CBn478OPYEbRh5H1a9ENGxwig4yOQRzzQMYxEvEiCXTJISMWqm8UrxKpuGc1LPIlG+oO7T7QirLZ7/Swtk1WXjLKw2FGhZEMWhE0rBXz61rH+2YZ4/AHdnEZQ2+63jkeFfVXlVV3DPV+f/67223yOm7Hh0UW1NFr0Iw01fFKW+sofvbrd0rs/bU8nimmP7H4X9KkPEFEjdSB+ciuJxDOrwPgjWQAk4WykHFaJCGoDWCyhQIlnExo+rJWEmk0URuJ9TP8QkSVixJLQJVjYvsN6W6ixAacjtT41654M9A06E8JtSsZSTtMq+cMlVesiVstdkmlWeVVJQ1v+MNMTrT9fB/xNJXlkmlEFDIBmmGFzOpPbmpkb9GIVtT1jcBrsL83FsE9mKMZuNl1WoHYAbqcR3XL9co0g25ONyToTcDwZ0htA/2pbe/OKIFOeIr3a0HqnJ6ZIRw/eu7HIUfrDBwOVPum9H7256oWijeX7j1Y+DyqVm/PM9Kq1hkqVjthy7h8f/5odKM0I7Fi75JahtM2v++vH3UH/GFmpNXygx6YqCEtfgI14yAAD41jDuq9yoq9yNvkqb6N9cyE0cZvhp7CCYvMw1ACmTQy8GfNO4HmD+kyHSa6q7FJbuemVymUzZr6YA27ontET/vFNtJRbrTw7f3xUYrq+BTaVCfthc76x/BWVBAOl0KIB5dQbUM7GBhQsiQ2oLRUVFUK3c2+K5Rs34jXPP6L1p3lwTSdQ2ZUwsaI0BQvAFZdCMc5hT99VoMp2PTMG2ODSpeoOGfVRXpdJrCKUje2Te+2urr6hYyqefzStkAoV2shS0TqzUnjy3MTq7VZTeqxHtQZ4jHNljlhdFOtCIs6X8XYiYvA11Ud4OyvNMFZfuj4ktlofWlM5hy5/mNMG0a/5pVr/h6SEhpH0gKglRF8VOWf0P7CHJr6mkEbo0XppbUuFlHDmR/jOCsgH5oJdZGGuyHCLKwXrQGgWqCJKXBjtRPGB4Wazi2Xp2pHlYkUPVuJng6hY+lRzcDJE1w8lVQZ1UVLQgBVZVuN86IsCLSoyfqY+/guUyNtcoVaMt3XeUjmrOrPT9gVbdlU+MmfZCjed/tjsuU+lCd1q7hxbOXPq/O//E13KTX/7xa1LTElStIKbfuCl+ROj5pjuHwH6Wuh+I3VoAJfXeo9BjE2+SPf9F+n+OFtndbryauWyeXPWBIVufx8z8fPj0Ync8p0rF02K2pnu48xmAuznorkq+v83V8X8OEllXWNS1KIsAhjm8BEqaecOf6Gdrdz9cvWevRs37ubiAqdwsupU4BftQ9rpl13ncZoq8Bo6TaOes1obJYiwN4ylQ4kBa6T6ZuyCWApJQCwAybrtcC5WJGyOaWRO5xpgGrt0AabxGJxrxDSJtCWmKXV22cRAzdRNXdqtmrZ63fqq6c9ka6PELzYOK4lhmttvin7IbRtadmK/7wMq3DtC9/Gj+A+M/d9pZOm4/yYfnwKZg63gAgwA4kaY29K/IxW2RixglplbbwULFGGJs3UsMLm6S9zYiqINkxgWKH+2fbtn7m3EAnfcvuZsNpc/6FbEAj+V/pVzD52infsw5q+554EOF+RcTd5R76vHxYGKyI2tBsizcNrHjf4jjsTuWQAO+3TLMuUwxbzHWVA10Z/ncA2d8kS60K02bky5SSiX5k6O+mC9SYA9VsN6Hci8S9SL6GXrRaT1epHPD7gKC0YOI+80p8vuWjFODuI0mJIlKwmx+hFx+BpH0HUXHBtBb71+xMr1RZ0Bz5vUygVPz16377WPN78yvoyb/My8Bx6Y8tIbe7+sfbN8PKXtpPvGTb35xqmZuQ/NmbVp2O3zAd4PXTjlxv4lWXlPzVtcPXLoDInxPPv8T9wUcRDgl9tIxIM8iItBF1GHLqbm0CXWYYpvHC6Nt7SELtgMRHBAZMWpAxhZnwdrhruyC+Xs16f//POA3qlFme602/OmzgX4Qn3aTyXRq8YNFa
Whdsfjz3FvwP5Wgow+F7rpfgwtUy+3SmZjk1iE8l5QhFLsrDDJ/BirQ8msKoklFSqx2kqzqlRRI6rNXlm5eNaStRmV46ydlcpN++hb3L3RZW9unjGe5869qd55N8aN9uBX98N+mtWl6JXrUu1n0dyglE2zZ2mlo4RuDZ/NncvnnXsTvno1IeIBuJ6PfGPMHjmcEIfwojXUhH2GVktT3sbS1L6bfj7dSmnqtxPvtihNWUS9NNXzvVND9XmEOEiD94qKHSead+7bd/IelsuaXDVmkwVy2cbSFfzZLJeFc5jLbufMFptew4J8treVM8HfjmaVLCO51YtYBjc8wI3Yq1FcCF4961A7Kfz93d93ljocnKUdLPulQOp44m6hWzTrjTe4L6NZb77JfXnuTe74669HU4ArIeB/LfCrZd2K/nd1qxCdqz3xCA3SrEe1J+ich7X3tPe4HM6jXUt3Rk9Gj9D3tTCsEQTMfIjJxJiVh2tjh9UeVmVEyfEFyHwgTW4uaJAz0yID4F5Fg4tou2yJXveglpv74HxfD4cjrjBu4MhAMSjAT/P5p88lTlppEcdw4uS/Lme2iDc3bGG61aKehU6IN/139axh3MPRJbwzOoXbM4SfeffQhoVGPauvNoFbKfUkaeRGAuZc63eQRCGPzQhBbLMU1JrZCTajk8wwKHYvIM3NYJT6gZ8ebPpTGY3b4lZFux4OWABjdo23gsQK+ya9rt/3/imrXkmae9/wO+4YXjEv9ZVVU7j0sQ/OPL7pVNGgdoceOz5pbVbOuonHHjuYe1PRyZePzVjK9hrRfqV+ViNLIS1bpa569mOUy8ByI6Xar9LuM33Y9yxA450xGtMKaolOo79AjQcaHQW1ziYa+TrFqvep3QaNfhIbbIjHqKc43KrVzWjsRRmJOkkoXpbH+1g+L5kscytH3nXXyPvmJu14rryionzVK9qu3IOPHStfmxlcO+X44++0G1R0atPxGYvHLp1x7OWTRbo8HqPVQj3vIYnkJoLo3GKtR73iUb+SGLHGXWnM3IHmZCyuJyKIZJNQFuylk0S2W1XywG8eQrTdmCbEEKjHE7+edLHk0fdY1cy/Pjn0qvHFAyaUrJ0+5IkhvSd2HXQP/eKBHTfcWByeV+Kcv+u6QV0Kp4/R9zjjvI3/TswmQTJDr5UoaWE1XqyPBJj7D2QY5RK8OcEJpwWWUQniRRWTDL1vns6yGoyWRgklSa5HKWAJJT0D6MEyl15CqbHaEpP1yFjY2d3yfqymKko8uyUrm5vxwd8rq97l+cYyynhO+MdTlbvf58y5R2hOwldfyu+tblZIWbrP/d1xP80BGvH+wo7sXqJn9fuI1FRIlxJDEQnTeAdfX0toimTPU9xhVn/1hmpsKZIZKAyy+1Nk7DwzdMATnLfgUyzoOxUfYoM2QHCbAoULs5QfFC0ePh3fhgVML346Ppl9Wkfe7no1E6ck0KoTEXmrksMAvWGeybTxjjScKQbJmnBmPtyLFuZc867tH5HXd/F8+dLK2U/Y6D7talM4n6cNg63XXmviFpTRtu/Vf7hV+ttSZY12uEwZv693aanz+0ol1kNaDvYWjxUCR7M6fa1LdhA7G4BzIYIM1Xp97ARAAy+vQwM/wiGkzc7GHSN2NppgtwFhUijiYJmfwwV/eUMMKtsdsVq/r0WtH0jx6bUNcGX4r8MyWk03LtOK6b3acPqiNrxCv8GQThWVaAfu06hctq1M20mvhV86jl8revgs437XHiTWNVeJnWEWvS/WOOeJVeYErNizRjqWzOGvxn5YGBnrW7uVtt0ielbDf1jhHn/+J/EP8QDEHj8g1FV6/FedDmPa0QcHmQwx4gGrvGWCidSG8yyZkAiH4WxemN3wWIAW0oXtIs5F8vTRxwT9Zj2lrUvN18dqO8Jf6SGlowtxbq3EPqkW4e19bWX3DovTx2emhPXx7TzZvV2Kc6eTjrrR6C1kvQnf7NiYMW7NksBLjKdVtC3NoVXaaO0L7b
BWchudSAVK6WRtuaZpDdqTNGnHM09uELjhk8ZNmjVz8vgJwznhxSef2cEdod2pot2kHdQOaANphPbQ6rW5dD71Ux/E3PnatorNn1c9JU2ZVD2/cuGLE6ZJT1d9xmQ2k6zle/ObiASZIU65YqA2fs2kOfdoJ6j3HkfsgEv10JnaTG0WnWkcXHB/EWlx9xCoNSkDmf1qyCxEuuNM50VSqwWQgPPNeNdlJyahToD0lbah2sTu7I3ExvstL5BXCCQUDikhFxNLu/YA/FPBVwfbhkJKagux4S2YRSHIA1BsGXh7oTsV9D8HhNcJpwKDxUpYrgUREnxT6Y43GFxGjpfoo+fRRBq7naTMkOYakOYRXZqTIAPj6CQmzai2HKTLPVn1l759e5gtZVbhxqG7tg8aP+Le568kzehA/pY5M/relZY4rn/Xtn18Lt/NuV1uvUF7ju65+frb9L7xNGEXPSK+CRJor1tiLblEj0flMfByen6fTMN+ftqHT/Jn4PtWSWvAa5VoA+hKuKoTpz5MDP7H1SvOWIBnd6uY6motumgsLpU37s5m96dIRL8P2CTrFVU9ySoKG/OWJcNmDh6bekfcoNFVT2qrenYv7mCe29syaPDwiUw/F4B+DojpZxE6Kh/Dk/BrAfVqJ+6hOdqRTxqP1tKFdJG2yKMtajzQ50vZHKspnc2xui47ySoX6Gltq5OsvAf4c9E4axEyrPlMKyU68/SZmaGwLq56xclF+UqTi+6LJhcpbqjZ+GL0XX0vxhCj5DOkiLw8BC8FsBeBmEkWiYgYaSQG7ywFiljHCj7YDjaLLKE31MFGAecdwqveUWlc7sxPxoAcr88tmTqzulIG6dnq5FKgtcpSm9g90YKN3RN9heElRuelJ5joZNzgFeeYuC90dgjGvpONe7+DpKyVnWNJLCOspkL8CoRikMogIwVcS7oewdIZwKoN6n8Fm0hEXJWRjiTKCbYrkxiLepemcjbGwysSyeezgMnpsyMgbxmQRffWpkf8rU2PJBhZe8Tp9hUXtz5BwqTRcozkLRTARcMkYodG/eON/YA/gMwukZRcvCMcZ4kPqx5gOD4dIqn59tCX+3QW+9ica22i/ldi09YRo8djrcwpXWLjMR632PtnyNaLtz4/hjtYv1v8GvQbrI/8j37Xl+IP6zO6mdb6iKux490uzRXreHdi2w/A9gMXd7wDLtxtREjKwY435nq+kBq6oOOdkC8oSXtF1Y8db1+zjrfPVRPv8+uPpEhMSvBgB8vfrEoA51jH2xefmKR3vP0J8YmNHe+A0fFOtgFscaVltu+AsEXxymp+AWt+411C3mSj+W33tNL8zr5s55uFkWbtb6m+ttX29x9MaZp64NP3tNYA52+OKRGv9ytBFtivzCQjrtSxzGqtY5ltdCy3Y8cyI/i/7VkyIi/XuDzHqLtk95K+0sw3PwuBVhPfbumb6X/lm5/VfbOwm13uXB/sT5HYcxoSxKMX+uYWVf/L+2bjeRVXKPwzb9B69Z+2ZX75cj0AbkPMJ+v7PdDok8c223EqeohAGO9tUjJCzQj4v/HKlyYu5jFap68L88iXJe+s7kbw/jespYKMPSQB51YvUU1NvEQ1NSnml2WvHwzyv6qoMslcWFa9k6nlRcVV/iddDryxT5x594MkFly4Ux+KIhEyUDuO6TRtPCW28RovT/A24cYEr4mKmuQ4C7yVoL+VUFCbrOd92GdKwCKXLOm3J1yRtJhcLqBuIvPlFxEn9GZSiMX9UUzHAiSHXN8qYmnbmlW0M6xiByKWNsFsfYRYzcy64uQ18xTBInilwUtH91/qFvG/l/1KzU9w2uEpVw7zNiqCvCQq6E7EsB/JcjFtLSz+8rShxbdC26XtozltrdvISy3puqyxfN6Sphhm6A+YwU9ScSb/YhST1hqKSTesZTugmITEFKQnTlaTki8HaAwqWuKa61vs/mKUMLL5jpntCFbxNMHKYjr2dC5h5RmXsPKAse9asPKkNGPbDt
z25c2huRguMIlvW1JwsW2ktGA6Jc8Lx7l3xTqIRHns2Scie76YLOjBCJJH0UvMYLTWWKlfv3eosCgMiXCO6fnvSr4vr94gHPcd/dbNxiTA920SltKz4iesDnAjwYK3XgxWfAW1vJFGJsQy/CQ9wzfSd3wmDoZudxz4BwuPrPBByg6JZVO11dfsKUh6dN5017V9S0b3u65kYGF2VjiclV0otu83Gk6MGHFdTudw27aFXZDWMuEUdx5ipAd3BdhMEtmwBi/G+vO1Hj2t9TAx1Vr1cgJrbeHUGc9G59i8EClWeZeRM+q7aioAI2gqmzD46vWF+X1umnTLDSu7FPQW6e33Tbq+yDtk2qRru1y+jvK/f+9FbqvwHST7PPCddRv4en2ItmnqFb7yotCL21qG87FLuK3i3it+fonY1fj8cCFEZfZco8Zn1MSeakTY4Dt7Ro2o3x7Dvu0J877hk6+7SghtpV21t7fq+7zMdS7zrJvhV1VMhi923FGjvW9c53wHKlH+v76Onz3+bnjnijGfUut7+zS8LwP2wpmNZ+z1YRZw0RP2dNoU0cUqKDbjLiCDTEWS2egGu+k0RnK4kfB5zYg3WKCvab/8msYt7bHH+RlrGqRgeUUqVqzslqiWz/ZDJm1vxiiDXTgT0oX+Qd3/V2vqrDTWDFeO2di5cswhmrN9m/YpfAde0Z/jPS93s+cJYSWmn1EREczhMD4KQBUtoVCzpwvFxZ4uZJSJ8UkHism4w87beBegAQXwZ9dSKi8l55euZ//pOjGBrKUNrIYUIFQxxVyYTZ8XN8cEJ+jCYrXPCReVPOE6pXCd31teR+FCxqWarkPxOkapqrSVyhTb002Asd4TD4KHhXwyBwnOMB6dptjCqszjhGItoTlWO8Na2PpIxmcpshP4GEUeM8YaR44VeyHtC5TcOpWTsP4JMvImABdTc7F+lIodjvhQJJc9zSWXWLAThLVRlGOHZg9pseNDWuzGQ1p+nfzGNL197WAPabFjr3rn6bq951j6aXPVxEFamKe4XDVOlwPST/izWfoJ5zD9hICGqactzulq1o/OYNVWfbQyiOOV5ILxSvavecbVk9700ksvUedXxZN7W7pM6br5bS4YPYo/724qLu9s6XJf96+0U5yvbGNZ1mkadDnHuTw/vpUDf3rePCHLY50u2uZ3jx6HRvHPCNew+3X8pFKvjELOh0+w1MMR3/iAL3zWjtnpgfScRSapzng+W+t38qArAA2o9evRy+/C2bpaZ1P0ciG6tdoNPBVgD+iB7M0D/+Aohw/yJnkUnbfiBtpx5CZp65C/SM+HX5TE8f36ae3pP7T2XKI2lFZHf6BzqTaPPka1qUyPEPh1Zc/UIJ3kgIzH597+f+LPPhMAAHjaY2BkYGAAYqY1CuLx/DZfGeQ5GEDgHDPraRj9v/efIdsr9gQgl4OBCSQKAP2qCgwAAAB42mNgZGDgSPq7Fkgy/O/9f4rtFQNQBAUsBACcywcFAHjaNZJNSFRRGIafc853Z2rTohZu+lGiAknINv1trKZFP0ZWmxorNf8ycVqMkDpQlJQLIxCCEjWzRCmScBEExmyCpEXRrqBlizLJKGpr771Ni4f3fOec7573e7l+kcwKwP0s8ZYxf4Qr9of9luNytECXLZJ19eT9VQb9IKtDC+usn8NugBP+ENXuK1OhivX2mJvqmRM50S4OiBlxV9SKZnHKzTLsntNhZdrr445tohAmqEsfpdeWKbffFKMK+qMaijYiRlX3MBRNU/SVfLQ2jkdrtb+DYmpJZzOiiYL9kp6nEGXk4Z3eeklVdJYpW6I8Xcku+8Ie+0SFzXPOfeNh2MI2KeEktSGP8wc5Y7W0WZ5ReWqU5mwD9f4B+6xb6zxj7j1P3eflW+E79+N1ukyzaV9kkz71+Beq19Dlp9msejgssDW1ir3S7WKjOO0fkXGvmJWujHq5HWdvWc0/pNxfUxWKTKRauBgm6YszTnXQ6mvI615TGOdaktNIks
ebePYEzZrMG88g326eeyVfMcMxSU6qk3uxt0uMy8OTUKA1PIN0g/Ioqe/W//BB7P4Hi9IeabvO5Ok/0Q0mU9cZcJ36T2IayfpmcUHU6a0K5uI+30inaIm/adUcsx802E74C0holcIAAAB42mNgYNCBwjCGPsYCxj9MM5iNmMOYW5g3sXCx+LAUsPSxrGM5xirE6sC6hM2ErYFdjL2NfR+HA8cWjjucPJwqnG6ccZzHuPq4DnHrcE/ivsTDx+PCs4PnAy8fbxDvBN5tfGx8TnxT+G7w2/AvEZAT8BPoEtgkaCWYIzhH8JTgNyEeIRuhOKEKoRnCQcLbRKRE6kTuieqJrhH9IiYnFie2QGyXuJZ4kfgBCQWJFok9knaSfZLXJP9JTZM6Ic0ibSTdIb1E+peMDxDuk3WQXSJ7Ra5OboHcOvks+Qny5+Q/KegplCjMU/ilmKO4RUlA6Zqyk3KO8hEVE5UOlW+qKarn1NTUOtQ2qf1Td8EBg9QT1PPU29TnqR9Sf6bBoeGkUaOxTeODxgdNEU0rIPymFaeVBQDd1FqqAAAAAQAAAKEARAAFAAAAAAACAAEAAgAWAAABAAFRAAAAAHjadVLLSsNQED1Jq9IaRYuULoMLV22aVhGJIBVfWIoLLRbETfqyxT4kjYh7P8OvcVV/QvwUT26mNSlKuJMzcydnzswEQAZfSEBLpgAc8YRYg0EvxDrSqApOwEZdcBI5vAleQh7vgpcZnwpeQQXfglMwNFPwKra0vGADO1pF8Bruta7gddS1D8EbMPSs4E2k9W3BGeT0Gc8UWf1U8Cds/Q7nGGMEHybacPl2iVqMPeEVHvp4QE/dXjA2pjdAh16ZPZZorxlr8vg8tXn2LNdhZjTDjOQ4wmLj4N+cW9byMKEfaDRZ0eKxVe092sO5kt0YRyHCEefuk81UPfpkdtlzB0O+PTwyNkZ3oVMr5sVvgikNccIqnuL1aV2lM6wZaPcZD7QHelqMjOh3WNXEM3Fb5QRaemqqx5y6y7zQi3+TZ2RxHmWqsFWXPr90UOTzoh6LPL9cFvM96i5SeZRzwkgNl+zhDFe4oS0I5997/W9PDXI1ObvZn1RSHA3ptMpeBypq0wb7drivfdoy8XyDP0JQfA542m3Ou0+TcRTG8e+hpTcol9JSoCqKIiqI71taCqJCtS3ekIsWARVoUmxrgDaFd2hiTEx0AXVkZ1Q3Edlw0cHEwcEBBv1XlNLfAAnP8slzknNyKGM//56R5Kisg5SJCRNmyrFgxYYdBxVU4qSKamqoxUUdbjzU46WBRprwcYzjnKCZk5yihdOcoZWztHGO81ygnQ4u0sklNHT8dBEgSDcheujlMn1c4SrX6GeAMNe5QYQoMQa5yS1uc4e7DHGPYUYYZYz7PCDOOA+ZYJIpHvGYJ0wzwywJMfOK16zxjlXeSzkrvOUvH/jBHD/5RYrfpMmQY5kCz3nBS7GIVWxiZ4c/7IpDKqRSnFIl1VIjteKSOnGLR+rFyyc2+MIW3/jMJt/5KA1s81UapYk34rOk5gu5tG41FjOapkVKhjVlxDmcNhZTibyxMJ8wlp3ZQy1+qBkHW3Hfv3dQqSv9yi5lQBlUditDyh5lrzJcUld3dd3xNJMy8nPJxFK6NPLHSgZj5qiRzxZLdO+P/+/adfZ42j3OKRLCQBAF0Bkm+0JWE0Ex6LkCksTEUKikiuIGWCwYcHABOEQHReE5BYcJHWjG9fst/n/w/gj8zGpwlk3H+aXtKks1M4jbGvIVHod2ApZaNwyELEGoBRiyvItipL4wEcaUYMnyyUy+ZWQbn9ab4CDsF8FFODeCh3CvBB/hnQgBwq8IISL4V40RofyBQ0TTUkwj7OhEtUMmyHSjGSOTuWY2rI32PdNJPiQZL3TSQq4+STRSagAAAAFR3VVMAAA=) format('woff'); +} \ No newline at end of file diff --git 
a/plugins/UiConfig/media/css/button.css b/plugins/UiConfig/media/css/button.css new file mode 100644 index 000000000..9f46d478a --- /dev/null +++ b/plugins/UiConfig/media/css/button.css @@ -0,0 +1,12 @@ +/* Button */ +.button { + background-color: #FFDC00; color: black; padding: 10px 20px; display: inline-block; background-position: left center; + border-radius: 2px; border-bottom: 2px solid #E8BE29; transition: all 0.5s ease-out; text-decoration: none; +} +.button:hover { border-color: white; border-bottom: 2px solid #BD960C; transition: none ; background-color: #FDEB07 } +.button:active { position: relative; top: 1px } +.button.loading { + color: rgba(0,0,0,0); background: #999 url(../img/loading.gif) no-repeat center center; + transition: all 0.5s ease-out ; pointer-events: none; border-bottom: 2px solid #666 +} +.button.disabled { color: #DDD; background-color: #999; pointer-events: none; border-bottom: 2px solid #666 } \ No newline at end of file diff --git a/plugins/UiConfig/media/css/fonts.css b/plugins/UiConfig/media/css/fonts.css new file mode 100644 index 000000000..f5576c5a9 --- /dev/null +++ b/plugins/UiConfig/media/css/fonts.css @@ -0,0 +1,30 @@ +/* Base64 encoder: http://www.motobit.com/util/base64-decoder-encoder.asp */ +/* Generated by Font Squirrel (http://www.fontsquirrel.com) on January 21, 2015 */ + + +@font-face { + font-family: 'Roboto'; + font-style: normal; + font-weight: 400; + src: + local('Roboto'), + 
url(data:application/x-font-woff;charset=utf-8;base64,d09GRgABAAAAAGfcABIAAAAAx5wAAQABAAAAAAAAAAAAAAAAAAAAAAAAAABHREVGAAABlAAAAEcAAABYB30Hd0dQT1MAAAHcAAAH8AAAFLywggk9R1NVQgAACcwAAACmAAABFMK7zVBPUy8yAAAKdAAAAFYAAABgoKexpmNtYXAAAArMAAADZAAABnjIFMucY3Z0IAAADjAAAABMAAAATCRBBuVmcGdtAAAOfAAAATsAAAG8Z/Rcq2dhc3AAAA+4AAAADAAAAAwACAATZ2x5ZgAAD8QAAE7fAACZfgdaOmpoZG14AABepAAAAJoAAAGo8AnZfGhlYWQAAF9AAAAANgAAADb4RqsOaGhlYQAAX3gAAAAgAAAAJAq6BzxobXR4AABfmAAAA4cAAAZwzpCM0GxvY2EAAGMgAAADKQAAAzowggjbbWF4cAAAZkwAAAAgAAAAIAPMAvluYW1lAABmbAAAAJkAAAEQEG8sqXBvc3QAAGcIAAAAEwAAACD/bQBkcHJlcAAAZxwAAAC9AAAA23Sgj+x4AQXBsQFBMQAFwHvRZg0bgEpnDXukA4AWYBvqv9O/E1RAUQ3NxcJSNM3A2lpsbcXBQZydxdVdPH3Fz1/RZSyZ5Ss9lqEL+AB4AWSOA4ydQRgAZ7a2bdu2bdu2bduI07hubF2s2gxqxbX+p7anzO5nIZCfkawkZ8/eA0dSfsa65QupPWf5rAU0Xzht5WI6kxMgihAy2GawQwY7BzkXzFq+mPLZJSAkO0NyVuEchXPXzjMfTU3eEJqGpv4IV0LrMD70DITBYWTcyh0Wh6LhdEgLR8O5UD3+U0wNP+I0/cv4OIvjvRlpHZ+SYvx/0uKd2YlP+t+TJHnBuWz/XPKmJP97x2f4U5MsTpC8+Efi6iSn46Qi58KVhP73kQ3kpgAlqEUd6lKP+jShKS1oSVva04FOdKYf/RnIMIYzgtGMZxLnucAlLnON69zkNne4yz3u84CHPOIxT3jKM17wkle85g0f+cwXvvKN3/whEjWYx7zms4CFLGIxS1jKMpazvBWsaCUrW8WqVrO6DW1vRzvb1e72so/97O8ABzrIwQ5xqMMd6WinOcNZrnCVq13jWte70e3udLd73edBD3nEox7zuCc8iZSIqiKjo9cExlKYbdEZclKIknQjRik9xkmSNHEc/9fY01Nr27Zt27Zt294HZ9u2bWttjGc1OHXc70Wt+tQb9fl2dkZmRuTUdBL5ExrDewn1Mq6YsX+YYkWOU23sksZYFqe7WqaGWapYtXfEp90vh3pH2dlViVSvy7kkRSnM9lH5BXZ8pBn+l7XcKrOvhzbaTm2xe8RZOy1uwak2imNvGn0TyD9qT5MvZ+9pMD2HUfsWy2QlhntyQyXYV+KW3CWVU/s0mJEba4Y9SZcv6HI3Xd6hy9t6yr6jYlfOOSpMVSlSVdVcC51jIVX5Df2ffCT5OLIN1FCt1JVZY9vnjME4TKBDgprStxk9W6ig0lXQmSfXWcC4CGv5vh4bsZn5LuzBf9g7VD4rKBcVbKBq+vPUmEod7Ig6WZo6owu6oR8GYIilaqglawT+w/xm3EruMWo8iW+p8x2+xw/4ET9hHzKom4ksnMN5XMBFXKJONnKQizz4YZbmCA5CEGqpThjCEYFIS3aiEG0DnRg74sQyxjHGMyYw+jjjIj8KojCKojhKojTKojwqojKqorE/z+nO2BO9MUb5nXGYgMn0nYrpmInZmIuF3GMLdtB7J713830v/mvJctXYflBTO6Vmlq4Wdljpdpj/4g/OOEzAPEt3FpBbhLV8X4+N2Mx8F/bgP5yLp9LTVMqgytdU+ZoqTzvjMAELmC/CZuzCHvyHffGqaZlqgmSkIBVpluk0xiRMwTTMwCzMYb20IuRTLDpZsjqjC7phAP6Dm/EI64/icTyBS+SykYNc5PEOfHCRHwVRGEVRHC
VRGmVRHhVRGVU56yi/wiSFq6y261m9r1/kMOulwRqmUfQtyt3S1Rld0A0D8B/cjEvIRg5ykccb9cFFfhREYRRFcZREaZRFeVREZVTlbLT68emHkREchKA7eqI3a2Hy2Xq5eAxPgndPvgmSkYJUpLG/MSZhCqZhBmZhDuuuuqu0eqE3+tlqDbLd8jOarXYEByHojp7ojcG22xmK4RiJ0ZwJCe/NrRSxN/pFFVdhyb60bMuyzXbJXrNVlq04e8TuVVBhp0VYsn0S5P6T3nhKrpKCrp9qP1gan7daSjD1/znsjDdmSMpvWQGrZAMyL3Nbwu5Qonx2j70vH+MzZCqKrD1nhe0/ds522Xbzkdlnx6+5e0pgd7x9bdaW2Vv2qf9pyeb4M+x7xj6WpHz6u0gEYRevq7vQjvtftzNXs5aNxvqbsNS/XcmmBmHfev8pgvEFlML3OHh1nfG4nRVhaVc+EwL+XnZek0m3k3Y341tKUpLttxNy5dq9ircaImsp9rnt432+ZB+y70rwVqlsGd7sB2wQWbwvwo56K6fpefU+3n7Fw8teH3ZehL2hGwrLvrGddvL6ftLfzb23f0E3FHazgguvny2+Mj8XsJ721786zgWE/Q8XFfh3uJB8lq6AsA3IuDLbF7Dq7Q8i6907+Ky4q7133XyzN34gr4t9aU9fsz5QwUWIGiiCR4rlceTjCZHLE6oKqqIwVVd9RauxWpLroE4qoi48xdWdp4T6qL9KaiBPWQ3lKafhGqny2srzB6PljBAAAEbh9+U6QJyybXPPWLJt27bdmK8SLpPtsd/zr/dcdaRzuX3weR9dvqmfrnUrfz1hoBxMsVIeNjioHk+81YkvvurBH3/1Ekig+ggmWP2EEaYBIojQIFFEaYgYYjRMHHEaIYEEjZJEisZII03LZJChFbLI0iqFFGqNYoq1Timl2qCccm1SSaW2qKZa29RSqx3qqdcujTRqj2aatU8rvTpgiCEdMcKIjhljTCdMMKlTplnRuZAJ87LVl/yp7D78f4KMZCjjr5kYyEKmMvuoDGWu19rpAlV6GACA8Lf19Xp/uf89XyA0hH1uM0wcJ5HGydnNxdVdTm80YAKznTm4GLGJrPgTxr9+h9F3+Bf8L47foQzSeKRSixbJMnkSverlDibRndmS3FmD9KnKIK9EbXrWI4U55Fmc0KJ7qDDvBUtLii3rOU3W6ZVuuFpDd39TO7dYekVhRi/sUvGPVHbSys0Y+ggXFJDmjbSPzVqlk8bV2V3Ogl4QocQUrEM9VnQOGMJ49FMU79z28lXnNcZgFbzF8Yf+6UVu4TnPf8vZIrdP7kzqZCd6CF4sqUIvzys9f/cam9eY9oKFOpUzW5/Vkip1L9bg7BC6O6agQJOKr2BysQi7vSdc5EV5eAFNizNiBAEYhb/3T+ykje1U08RsYtu2c5X4Nrv3Wo+a54eAErb4Qg+nH08UUUfe4vJCE21Lk1tN9K0tLzbhbmyuNTECySQCj81jx+M8j0X+w+31KU1Z7Hp4Pn9gIItuFocAwyEPkIdk0SD3p4wyWpjhCAGiCFGAIUz7OghSo4I8/ehXf/pH5KlcFWpUE3nBr8/jPGIYi5GmJmjiGCsIMZcC7Q8igwAAeAE1xTcBwlAABuEvvYhI0cDGxJYxqHg2mNhZ6RawggOE0Ntf7iTpMlrJyDbZhKj9OjkLMWL/XNSPuX6BHoZxHMx43HJ3QrGJdaIjpNPspNOJn5pGDpMAAHgBhdIDsCRJFIXhcxpjm7U5tm3bCK5tKzS2bdu2bdszNbb5mHveZq1CeyO+/tu3u6oAhAN5dMugqYDQXERCAwF8hbqIojiAtOiMqViIRdiC3TiCW3iMRKZnRhZiEZZlB77Pz9mZXTiEwzmNS/mENpQ7VCW0O3Q+dNGjV8fr5T33YkwWk8t4Jr+pbhqaX8xMM98sNMvMerMpfyZrodEuo13TtGsxtmIPjuI2nsAyAzOxMIuyHDvyA34R7JrKJdoVG8rx9y
54tb2u3jPvhclscpg82lXtz10zzGyzQLvWmY1Ju0D7yt5ACbsdb9ltADJJWkkpySUK2ASxNqtNZiOJrxPv2fHQJH6ScDphd8Lu64Out7oeujb62gR/pD/MH+oP8n/3v/PrAH56SeWH/dDlxSD+O+/IZzJU5v/LA/nX6PEr/N9cdP6e4ziBkziF0ziDbjiMa7iOG7iJW7iN7uiBO7iLe7iv7+6JXniIR3iMJ3iKZ+iNPkhAIixBMoS+6McwI4wyGZOjPw5xFAbgCAayMquwKquxOmtgEGuyFmuzDuuyHuuzAQZjCBuyERuzCZuyGZvrfw5jC7ZkK7ZmG7bFcIzg+/yAH/MTfsrPcBTHcBbPqauHXdmN7/I9fsiPOAYrORrrkQaa8FG4aSvBgJI2EBYjnSUiUwMHZJoslI9lUeCgLJYt8r1slV1yXHYHuskeOSLn5GjgsByT03JNzshZ6S7n5JLckctyRXqKLzflodwK9Jbb8lheyJNAH3kqryRBXssb6Ssx7jmG1cRAf7EA00sKyeDgkJoxMEoySSHJKYUdDFCLODiiFpWyUkrKORiolpcqUlmqOhikVpO6UlPqSX0Ag9UG0kwaSnNp4a54tpR27jHbSwcAw9WO8n7w2gfyYfD4I/lUPpbP5HMAR9UvpLN7zC4ORqpDHIxShzsYrU6VaQDGqEtkKYBx6pNAf4l1cFaNc/BcjRfr9oVySE6A76q5JDfAD9UqDiaoux1MVM87mKpedDAd8CAEOEitLXUADlC7Si+A3dVnov3sq76QGPffTGbJAmCOmkNyAZin5hEPwEI1v4MlajWpDmCp2tDBcvUXByvUGQ7HqDMdrFRny3wAq9QFDkerCx2sV5c52KCuEz2HjWqSTQA2A/kzOdj6B09lNjIAKgCdAIAAigB4ANQAZABOAFoAhwBgAFYANAI8ALwAxAAAABT+YAAUApsAIAMhAAsEOgAUBI0AEAWwABQGGAAVAaYAEQbAAA4AAAAAeAFdjgUOE0EUhmeoW0IUqc1UkZk0LsQqu8Wh3nm4W4wD4E7tLP9Gt9Eep4fAVvCR5+/LD6bOIzUwDucbcvn393hXdFKRmzc0uBLCfmyB39I4oMBPSI2IEn1E6v2RqZJYiMXZewvRF49u30O0HnivcX9BLQE2No89OzESbcr/Du8TndKI+phogFmQB3gSAAIflFpfNWLqvECkMTBDg1dWHm2L8lIKG7uBwc7KSyKN+G+Nnn/++HCoNqEQP6GRDAljg3YejBaLMKtKvFos8osq/c53/+YuZ/8X2n8XEKnbLn81CDqvqjLvF6qyKj2FZGmk1PmxsT2JkjTSCjVbI6NQ91xWOU3+SSzGZttmUXbXTbJPE7Nltcj+KeVR9eDik3uQ/a6Rh8gptD+5gl0xTp1Z+S2rR/YW6R+/xokBAAABAAIACAAC//8AD3gBjHoHeBPHFu45s0WSC15JlmWqLQtLdAOybEhPXqhphBvqvfSSZzqG0LvB2DTTYgyhpoFNAsumAgnYN/QW0et1ICHd6Y1ijd/MykZap3wvXzyjmS3zn39OnQUkGAogNJFUEEAGC8RAHIzXYhSr1dZejVFUCPBW1luL3sYGQIUOvVWSVn8XafBQH30AbADKQ300kQB7UpNCnSnUmfVuV1TMr1pMaCZW71Si7KoT82vrNi6X1SVYEa0ouNCPLqFJ8AFyIIN+T/dgzE0iUIokGJTUO69KpuBMMvmulUwJ9if980h/ILC56jecrksQA2l/AS6aDaI5OFmKat7bdan+r300lAkD0LoNugWfkJ7RNiFeTvHgv7fG/vdo5qh27UZl4kui486bLR98sO/99wOBPNFG3DKAyDiqC6qQppEoQRchTTUFVEFRzQH2NsFt90m8QUejsbgE6/BWmkLX4fd5vAECkwHEswxtfUiCghDaGAYwpgatwgYKG4TlUKoH9digHpejYQwHP0NtmJaogVAjkyoG1IZ8r3gbHWBia+bwxWhFrRPgrS2gmh
U1Xr8rIaCCoibqM404fhfD7va77C725xP4n8/h1v/cApslQXqrW0G3H9DSgVJs2L2gO5q7L+9+4ssON+52W74RzR3oLVxHh+O6fBy8GDfTgfxvMd2YT4cTNw4GQBhT1Vq0yuuhOQwPSW9hYllqBE5hgxQuI0mxcHotihoT4K3CW82O9wQiilY3PEpR1KQAbz281Zreu8KESvd4PR5/ekam3+dISHC40z3uFNkRnyCyQbxscrj97LIvPsHXNkPoPXft+Y/2b31x2973c7Mnz1qAbbY/e/y91XvO7l6Zm1OIk/8zy/fo6S2vnom/es1ZcXLp69PHDJ86ZPLGEcWn7Pv3W788tLhwFkiQVfWtlCMdhFioBx5Ih3YwJSSrwMQTamR1s4Gbycq1JyqgRqVpVrEaNp/TEsMjt6I2DLD9Zj+0ZuHphorW5t5I87t1jfSnaZmCm//KTGvdxp6e4Wub4GCCulM8fqcupd+f7mEMYHpGsn4lOfIC50byojNra86C17bOnVeyqHfXTr16ru5J7t+K8rattJLPdO7Zq0unPtSURQ5niUU5JdvzOs3funWx6elhg3t0eXr48O6Vp3OKty3ulFO8dbH8zLAhPbo+M3TIc788JmY/BgIMq6oQf5EOQCPwgg8W/IUeNGCDBjWKn8gGiVwpUhpwpdCaWRrwTkhpxjulWQrvrKFJe+iWuqEuwVqXE9FA0ZLwHk+uJKuuWoy8sJpwojK5mnC6uFqYMIMphcnp9sqMusZS20w0ca0R4p2ZGRkhooa98Nqgxw5sKzzQZ+xIfPzxrdMD5YO6Hn7+PKV4cdU0usG1dW3KpEmPtx36ZPeBuDBLfWHS8k6vf7BzQe8Xuz9DZ87bVLXt9oTHOnz6xDgsTpw+b9Iy4fOBy//VutdD/6fPWEB4XnRBUPc5SsjjSNUeh4HlPibomIsvSivocvwEEBbQZuRFeSRYwQJqnTRV1DffZst0ykQwKfYEp8njJQum/jjXs3KvBZf2eMGzYGoFeeZT3IzPdZw2jqbTz3rQWfRmycDxXXfgcwAIHvbOzFrvxHhCTN4Mm92fTog3M8FmI5kv/DTfu24v6b1hsHf+D5NJh0/o8/T1LuMn4U+YlnwGs7BRt/FdaAkdCggNyCChh6RCHUgO7bvIdlfU9z1QlwWSRNXCektaIlsqNVNi7jnVKdlNguDFrvRMK2xlWRuFTVvRk4dm7Hl7pnCx75px2Ju+Mqbo3/Sn/phMv/w3R/40rBTTxXchGuoBe5kKuvuQMWxfurtzuKxuK3N2Vh/ZiIV0xB46Agv3CLE7aTqe2InFgNCQlmM6XAUzOPmbNPFeEOEvBc6yV3ct8XJuVn/xnSG0vHPO4q0rhh3jOFJJEokl74LAOGQ7p2GkY2ILk1iaiF+RpDWAsJzFsUlwmnFdP8SMiTFj0p2hFH4qk0crBw9Xy9tn339/dvtBrR95pHWrhx4CBFtVjqDokdAODFpkKGRPOt3o27WJDNw4U24JQGACs8IoZoWxbL32oRWj2M1R7Oaws+I2GKVoVjR4pkgpFOJOIYJfsfna2uxe3S5MVt2dZIpR5RVfXxfLv/u2XNg9v2DZPJK/OH+BQEbTvfQA+tH3Bz6K7ehZeij224sXyumlihvnbgJCCQC5LL0Hcg0uiUGR/pxsgMQNQkzThLB1E4FPspzCbZX8qT5yeQ9dTGwNxdP52w4DIPQDEH1Maic8BcaAa3i3MyLSBDRBcfKVFEWzhOcVHps0h1MJrefyY41fYDGmse5GEF2ir7Ij3hrXY9GERWt3o3D5eAVLa6aRqwtI69mbemSv3LDk6K3zuy7Si7QPIPSvqhBuM3SemogRywDF1qCrywZ1OTqI1f0apGkfA/bTNgGO19L4rwGA2WqsQdNj9cwNFM0TJsnuAf58XUVtEGCtlhS5oT4mhhKSosYZ8kgpJjcORUkupNeNuYtzCqumFOwOfnTqm+kjpuRUAR1Oq/YUzspdtn7VYqEtyc1GyB//5u
dX/jtAa+FRZx/4ovzdCYuW5MzOI0DADyB2Y7oaBXWgizEChN0ClxUtIseKzAGGhWJZDvIsRzPL0XpCqd/EwTvcukmjD11Wk5B77NieYBZZcjA4Fw8m4Ndr6A7sPlr4qbI9OdYEENYxG2jJUDSEQSEMyJZFhiFMPrcAVDQxzJ4pFjkiU5pWLzwpmeqxSc62NcB3ID4M1sSjN/MTduZvBEapzRFPWDT2+hKq2XSnmEynupJvgm+1GJl3+JtfrpT9at1pXT5p7qpN86d2aEOukAvb6YSH6e3rN2jwwoczZ6svrdzlbwIE5jP8DaRdEA8u5vPCKlxbAr7/GCkBVEvgiFQUrUGkHjjcsmi6Bxf8fgVSBWbcjholEJ5JuVQF8RMO7/vst1OnaSX2wn+dGbA56eWpMwtWSLs2iLduzKe/nrtBf8ZHg51wJRZLwXHZPR9/+9r7LxbuBmQWCGIqY1+GtkY7D28Fxy4pkQYO1QaO6OYeVEwNvvZf0qeyQrgkdb7zvpRYBCDAOMZLHd3KXdC8Zm8d7IUO9vawsnH98locnAsvsyUv9ovcUqGel+tWnFffWUukmagORUuJJCtkJKEsKyKTEHimpfOFes7ZNoPRVjFhcPaCqsCZ4NzsQeMqykq/W/PSnTWrcuatpt+MXrigfMEiMX10Ses2H0z+8PqNDybta9O6ZNT7ly5Vbpm2rujWsgKx3sKJY/Pzy5cAEBhaVSXc0uVsDL0hXO7USGlnAzuXUrBzO+FpBAj6L7tBRQ1OXY2u5RF4BqRLxLXB6lBAcvuZl0hlLt5fk00LD923ZeCsvcPHnsi7dJuq9M3G3s9/p9/329B449RpqwvInA7PzbiRt/KbGfRD+nUG7UWnSuvFL+9kP9f13Zt7175YBlVVkMsi4GjxcfCA7XdAE4tnfwgTQInwhIk8kLE7m7Ko3IPd6WX3fCJMQBmUGAAlIsvW7wSEzvCRME3sCjIkROgYu8r8up5LoeRAPzrQTLIrTzG3NT94AKevxGkHOL9FWCBcET4GAUyQCsxgWOKgkxhp3ZpYK6rzlEK4UrlPeIz/Ca22BEs3AyDkwgHhmvhEGIsenDkWKaBKHIuOxC/UD44UelaWkEUo7KO5K+mCUiDwRNVvwiS214nggmf/InYls0Ey3+v6UthY6itchUUF/jZ+QSh+seCVmXkvfmWEPL+Jpbzh8ngYaftUznNjsobP2E0+e/fDsy+P7lJWXS2vm7zouYUDRmdNHvXvlw8f37WzZNSzRfSj6vIZCIyg98sXpDXgh8fg/4LaNpSbmBlis14BBbS4tmYOMS5Nk8xx/JdZ0dqTsL0F1LaKVj88wUrWZgG1WZrmDs/FKdojJFJvmd/y6sqbmWHjEjkFmeclNnCliMQk20Q+cuoJPrHbbCxoizaU9dwl086ZkI/FXHpnrz9jcddlK+1xU/dnPTunW7p91fglsp3uptpReuTt6Jjl6D3d950HUh86mXWHFr0VE1OOM364jUN33P25zrO9HxjbGFu1e+SFtfj7z/SrbT3+9dXJ11BY3fzh4IUvr7+NC7DoMM37/RZdVdbCPcHb9gZuxfpox/d+uE770uXLioYPsOAfDb/nLDYAkBpKKpggCjrWzp5rHxfIbCBzdbCIRPdfkVqrRemToZIffehmvXAyuDH/EGmxjbQ8GHwKf7iFM+h8dujSjdQjxSBAMYCYp2fuCZAEPQzxsnb2BHqEdKZpceElzXE8ieKRSAkrIRpdjc/qCmccshvZkCUjrlRXKE66ivHadz9MHDopn35FD+ODuS/RT2kppsxas6SA3pTUA6XDNzR37Z5z4DopDv66eBqa1s0aNWU0AMJkFhEuSQcYhx2MftKY67ITkrgAd4A2g3OsGzliSRNXLtGdDFZ/OtcacLo9TF0Iq6ZteuJ7qT698T2l9OgKjNr5FSY6y+puLXz/9CFt8/YGeOrLu5iNGUuOY/prNPj5jvX0x7tLv6NfrXgbiM7yIcZyNDig/T9wzJ
mLCaNirMbW4lG0OVnkFk2ClXltVtoTbzG+tA8bb8JN9PKBs8fK//j6gqRuo8eO9jtFj71OJNvdxRhf1eMW2gkA6kg66kiehrBG/Sk/ixZlvq3RBqcoKoZsTdHMBhdpdTmq/4TrwXzyv8ohwqpgSzKZbAlWbpDUjbRF9fppbH0LPPIPuq5ZiBhW74j1ZeOK7ur1TgQ3lAq5wfvIEJITnMnXqgMI05h2XGPakQSD/7+04+/qIa1RKLo2Sns7rlFSI9Lv7YcbPcM6rWEEmlRZ5A7H61eA7ZLTTVwpRKjWHB46xGtd6R+qRivWEPRhwk1MSCrNoOVlh/H6/lEv++lOouwfkbUV04/Pxi444usL6KI/0arJv9FPWrfHTutD3Elmfe96GPfOUOYZFMqwqyrwqoGTusmC2VqaBftFbKheXXFKfaz1SeayYEppKSkvY9s3QFKDy0g215/3WDNZr0Yb/sORsf4uH04uLZVU/pSfVUAn2M84aGXMZ8PBm+Nj4KRIA+CpvzWUfvlCxacQXXb39OWfS/PnTV6Fknr39umK8iMzlxQuhGp+JJ2ficbMM1x411Y041kyEJ6FPmLtCn1hBEyDRbAOSmAPmPtp7YGRJUuEX7dnyB3lnvJweZKcKxfKr8vvypZ+DKtJJw99iG5SX2PkLfwq+BEZ8QV5bTeNZxS2JoHgzMqz1VbQgCGVoMk/WQFE6hfXdB+OIFrl0rINzJ6qJZa76967j5FXw9YYlMAQo8Mn1Xw5BFE/4A91URCqvizEx+SyoxvtrMcteA2v3S610ZRV1G0vZXvwH/FVFk4yydC7w8Si4KbgUY4trK0WeFLDKG5Axk0JA6mtPQbz1IgEOiq944qFnGYMqai7rIx8sl8cfHcjA7JWfB4ITKqqkCzM6q2QBO2N9baRiFglslASaxVK8aTantNDGYTDq5+JmHSTtmVKluX0lvoG/X0VWYnRb+zE6OX7A3vfPS2c3b3nhECKL9CybcXY/lTWGXxsezHdf56ggA767e8j79IbGBeE6qhQqlfLdnhKi4rXS5YonsBBmILahZMWLeCfXbMQjm0cPaeIeSFW37uro6zXhVmlpO4PGEf/+IMWY591r75aQNeT+4IsLv169NznG1bkz1svAIHRVVGSzPhzQApDZXY3DuVtat1qVFYGxGrYP45KMFv5fVZDVGXZXrKRU5NkSpX/jtdkRivmTkUxh57s3O0etyrjtvTkvndOC6dxIuf2LP2454mpv9ru8VtCy84j+8/J+b1Dr1fzuw1APKpbhxMGaVKifrwi8S8k/2B0hgpbU0JplmJIs6J1y+Aak2AMR9WkyyZ0uLGGd7KflpThp7+jZVUO9jwVHIPeguItRfQKeSr4lqRev5B3rG2wMIZ8s3rGwuUIgNCNxa1sfl7EUIO3CVvL4O6NH45UmR+ZsFarE0boqaeHb4+hHKzHP6ew1ljj8hKQbcSfvqFw7a9xu+ke0vOPG2i/Vvjt3LJta5dtWoMjTw6hFV8WUuaMPnql6OVCkt/p46I3bkw8MXX+mplj+0wfPv3VsbvOTzgye/7aGRde4FK1ARDX6HluK6M4RvplxRDyA9XE8gi6hrbYT1uKwyXbne8l20ZAWMKYKmHvtMEDmmSPZzIb3aDhBMoQa7Q6BnORwWRKAS9z36FzEKtYgrTqmu8HepPs27HllTcltTLlFL2jECSfCtcrPRt37tgoXAVAnr+LQf28o50GJl7vGBM8g9MzujZAQfdpqXqy7iPs69qZ4M2S4Oenq8Rdd7qF/OiDAPJ3uox9DG7B6EANphnOB2oUOo4N4nQfL0RxbyqHuli9YwQ4M9HHGjvH4TVxMPhZg6aY/DLWbZL0aRndtJOeczrp0Z10cykeL31TuFVpVg8IN+90E1PHjr17leFDaA8gntLj70gjBWE8tZ2w8UgcUOTx1ZILhfA6vAsiC7nVU/nyWrlY3i2zKQFkjt0iQwi7HnD1/31kPvb7lKbjxZt0HS36DC9R3w
1hHmkVbBVMIe2CR0g5OcM5jWNI9zKkZmhjRBrGY0AaBhdajwdCHxmGM67QqFIadY2cJ1crxwZvkCRhBX9/TwBxmh77Hoe/Tz4ifYoI3NHwcwcpPGmRTGwyFPv9/AzCge2FR+9eExpV/iD8sWHDcnHexqV8vZX0CImW54AJUoAhVk2182YhUttZ+ORZM4nev58uxKnSV7enFJne5+9pwr41tKv51kDSIm2JPci1o4lKBqqSeptnMRZ6BHP0VVP1uzFNJZH4VTQm7HZ+hsKSCQtOo7llZfKcW52L5Dy+7iPkshCv25DXYENhVQ9oaOLGwheRuFOornBL9r2BzWdjs+3iXtqIXAw2BQSxKksoAgAB6ke8pnZCJfHznKLKUcLqNWuAa694Ca9IFARwg4q8yMV+9z5foRI6WXo7jiQRwpM9vvyVTZR+wh7zgB43K4RvxKehETSBqZqzaTO9WFbU5Opo42QgnIm19d9QYROnnnlF845HePZ4ZK1ti3ZWx50kw7GeOzKH93h5vsx9uu/edwv94MdpjXc69NM9dzI/2muiRM19a/NJxK/fnjh+SO6eCQcn7T0nemh0r/XuFfSNicndc99ZXLy3x6AJQzs9u6b33ldpnRd7K0v7di4/3GswEN33JssAdaAuDNVs9epzbDZFFQLAvFI4s0w0er1a5xiSWdCTzRjeqTG1S3SnMX1gJz8mnmNnJNusXi6dycrdtZh8s/TkOEvJ7nG46Mbulfnvdevx9oLVxHqLnl0xU4bgR4vpBRqUPjxVQluUnAKE/7C9qmB71RC6aEqjJLZ0xNFbYu3cBiIzGiYfP2SLZ60RHqfWV4dBBKu/mnG3R98AxjZ5aMhq805p0sEx/6N3J15e/e5P5p3mgqylL63LmdK337ah6EVI2vh73pUdWQuPl7r3HuMaNYCh/FEGiIN6jOHE+g04RYkhhuU0w6moIZE3opeEGJ1hveMM2//2s589neW2TsavmysRCf0DgkwrF2JAxf59Y3eXWMYe+uC73UW56rP/eiOviHhuY9o8kn4HJuZh+i3T+4GN+NPaMxx7P4b9F8awg3GcpZl1jjl7LPcKw0usbQD1zMDvq5f29v56H9cj/WodhigRH7tCd5qNOZiUAv57J9quhITQSSCmyCaX3+MhT12jFdP/N/fsN0G3+NaiwXm+8Xn08rgiG2lkzotH188pW4IF9BsafGrzwW6P9T4tHHtlVZ2lLwHCAwDkmOxg0gzR4hK4FUZI0ShSwRMjQ3Ft+TjfaEiPYyOdpWoPML3i5zzsJF7/1OA0hRSIfwD7cvv2PSWPPByV5u87+Msvhe0FY3fssxZasgZnF1T2AAIDaU/hZ8Z4XWgMOVpKqofzk8KTQzDAC9tfYmT9a+ODGjcV0hsup/b/uHsP8CiO5H24umdmV1mbFwSKC1qSESjawiByjiYbBJIJJgsRDrCQwRiTBAibIJJE8JGxEWPSioyJ4mxEOM5gnI/D2RecpW193T0rNL3Ahef7PekvPTubd7t7qqqr3nqrNtzJQjcRHlHt/DlmniIFYYp7RJjSfAG8O03jojC5SqsVq6yvz17MCdzz242Zn7bKmrV/cVHOmVPflK1bfOC5gXsXU/nyoqbLZ1d+euOfowfnrF6/LHM+SvzX0etb0Peb+D6+HED6xABgpnocZLHy82JKEFB4wevjd8LonbDacJ/tWUF6M5OaFMMiXa67PKRHnfIuoMGSB43PeX5JvMcjHS0i+d4U/KeZU7N6VzE2Bwa2DY9TznO+WhvVEBpGP5m55kjPrHtEHnANScigCDCMjr420OO5rOHxcjqKfqpNm+effRZw9WnSAw2l3xcCDmbDnHV4mMK4ffAE00tPsA6wo4aAwe/2BNWk6B1hU2ycO0VzgSUmgdogepD7rZNjktu0s6alpNKxpMrpld3IZcuagA795eMoulkGHxYgtg5yiAHouGbqgiymIqLWPxmDCeAYiz0d/FGYcgii/qDv6UchmIuGoF
oQJk1zCstmeDyjUL/PyDB0+w76aQ5ZaICqkbPQaPKsdxkg2AyABhrAD82Keiyaxc6EAdgcCwAMs/nuMUuVuWUTNewJBk5Qt5p52+gdW82devROPe6lB/AEuMKvSgMEcL0O836czDik+iRVo2ewG644doXSlVnlXzyX+tYf0GiDZ0L+i0uCyx4c6eCR02cvf7t3FlnsbYrLZ0zPG+dNxBe+3VT1tZxeo0t0VmborwZbrOKsxIkIm/ijEQZzz5k1CNZrldNfrVArw9zLOrWS05ds1qsVHRRgGEa9jGQ6qnCoBx3UkPqRPg6rVR/D+2+AqlVwfuuKjDC6dMAYctQUQQ1Hji/hsPxPCj9C5jmfvXGP/FC2a/mKnXuWL92N3VvIMvI+CS2pXI4SqwIP3f3okvrRXeYBkSw5io8tAqaoVm1/tjL8RtBBXRQqrJzFPxxUQkRf6DE7tegLMVFnkiA6Q1Gfn72Q69kTmHvl3S88m5fsHtB/32vF2PwLuZHv/UW5O3s5uUt+l4/eWuutXHOT+xkkS/rBN4+Jop/xH3YOLuQWYfX9PY7/6G6kMXjxEXfj6wtncgKoQ1d2/itP8Ws7Bg/ZvqgEx1ejxq9M/j0ey7NRy6qAsltvYEvhnzXZxUV0BqHQWZXDWKZRB/gLg/XbEbj/jHURV7CPh8CX07e8TlzUpOWRdp5D0rBdqfWlNcZNXpDT818PA8R9tONyb47VBGpYjXC6BeKjKtWvIcCGUhxeUGtJQCPrm0pjK+hRbSCSXhvUcBD8Ga88l69xTyScSx7s6PPZgWP3y155Ycy0Cci+v/+XngWXcz1KwbTx81B0j/7PDpjR97Vjp9b0nDKkS4eObQbNGfz6geE7sjInD2RxXfW3eJDSFuwwUg1zOEVEo46ehFDnUU6NRqBjoZ8ksFAC9FNldBoLs2Nm5tnw027nYQvzfMxocXl5aruYp7t1mvvyhQtKW/J7oTe7XbuQdbZ1y/CWQmQABEvout+jJsJErRXFMESMTBiWuN3oCdka6Qo/xgdoyAbD0SAmkFRApUaTrr91GHku3+rsKZ0478oFfMbb6ecSyVp5EQBBLIBUJqc/HgMSRK7OIxiQImBAlF0ZcpLMXUFmn6yUMiovMiuIoCmAcpPeDIEsVQkN8/98Ub5FyX9y6AXBEt9ktKugYN84OAbEhmK1JsndKzzkwjryWzWsIxeP/blqbbXUqvKilFz1Jzm96rbUBBA0BpDK6diCob8wKB3qU+ffoz5BMoek+NUj6I6VbeSSxNAd9MvfPyAlaPLt33//C5pMSm7jA6jA+5X3I7SWTMQu7AQEDtJDKqWjCadeEZjM/iul8wCF08KcIwhjuq8nUwDTU20M2OV2pzgZhYCO4/uqi6TXmHuuTokjxsc1Ji+Xo3CpaWU0+acUuk7uOWaK3BwQDAGQ3qEjETGgOv8HGFA6nlO1Aw/0HpKSi4qWSHU3vMoxFPIGLjG0hjrQUrXWjeAzD02guqgjhkUbWRZLqo2iDPzDOQqckuxKSUxJSWURk5myRCiL3OLEsw++c+sWPvBO/PVdu6T3yRuJ909c+tfr/6w4+lnS9A7kb+VfDH3+/vvku/ZsBAcoJ6zjE5mqiPlQHdeuJf80nGKvttLxTvONV9HGyyCPOpQxH8y9WTMdr5mO11I7XsVi5uN1plKmchods4nGFQ6aEU+yx7Et3Wi9ajx8+Hr8QRXdunX4QGU7FHTvwYDnvrqKIjpMT/zMc+OH1/9VfuLzRPb9r6I35B+kOHBCe9XMcwNQ68g4OOZUGs4DfVuC3paF+9uyYCYizAI3x8wiG7l9djipsKTIPxxf2nX+nu5Neg/Ydqyg5/LStpE9R0qBJXdS1jSYOAJvfb/ttiA8YyRgKCDr0Vi5F48fEnXxA1QwaE1QaaHkBTNtYdCc1WVlrjqLG/bufljxgvdXfqv09EUNiNYwBFMmajzEwnMqxLnYnGu90Dr+wLGxQg99BHHow8ZsNzvWYUe1nj8AYt
BqLzAVJwuvzRBQkO6jKQpiuLjK887l8oOedWcMGgiy6dU5Q1++EvHV13Go/j3XLRQZ+/knzlvraqAQBMMAZBZdxcJctb7/uB+B9qNtPK6LTlBHRtM8d2E0ylVPR6NM/WwE+iGr9gmo0NS9NJrRAR4/Q+S0GWONsYwml5bipluVJOzFlAqKzga0wR+hyl97NUrEATu2Bv50+dTHp+fljF8QiDLwlHsbhxUXB76aFfBRMZIvfX/r4MS5G/NJVTEApufmvjJM/gfUgyaQoeKmzbR9qdRdAeL+ZapgMS4WUECKRbn99i+30Z0WT7XEncZ9mDSnkXG/nEZkczgSOamZc6HkPluuX9uyaEHBuKmrF6wueff8lrULi6aMLVxYlTX9/Ofnc3MvTM09P33qwgVLFq/YXP7+m0VL1s2es37pxjevnt+yagnOy7v1Ut7NvJduzpl9i2lVNIBMkyXgqMkBOOiwHUISs76/vxhulZqqEOKgEz4Ubo224sxSKxM2elQtWEcPZvpoZEc1DNfKZQXH5Bnv317D/ef/KAmPRZM+JCPQ02Q+mk/mnyWLGPKMniEj7klheLu3Rf6OueQUaj93Rz6uYOdgNbVgvbgFM0IdZsOERJWqIKkp1TXqEDDXcHVZWRk1+c6qr6TL+GfA8Dwxy3OolCZDR5ivujp1phNiVT4ptYgoLw9iH+UI4NU8DpOaoaO5OzJ8MFkYFUgBcWnh4ky6FiY1rfbByLQW/CuYkPAqIiFC0AjezJGJT0l7yPFujqlM+JJ+cq0X6ZCjcEOKHWu3nVw+5DllnbqSqr9OvdK5oOzQ5iU7V14/cibzSPsuKPjjL5Hs2V2wctvTi1H0ntx072fP9+jbI/U1VL9Z7wEF6MDJgS2XjN596elnct/DC4pmZg0d36ZFzqacsiH04Z2XP38vf9P0Fzr1bde3a/Yr++rUs47p1Llv++fMtjGdhkxm52Gs/Hf8g3IBKMgHkYyhqauWYNlOo0nTAh7PaRhFw5obY33sxbe1a2UYJSxS69fUZwRBgmG0kutvynmuac/AWtWd3oqThZnMsWOqT+Oa05PVvEZaU+mdVO7DpzbXSLeHwqVoCWeqQc1TeeI+4RAEmYLoA2FBEi9ewkLg8/CeWo9n3UpTaXa8tuyrOdVgWX/6uD8sOvs+knZDm4Xy9i2U/NXAxSiPNJMeQxPpPsaCPPKtkuKTpzdt3f/GyGEjJk0aMTzTi7YiK2qLLFtLyHfbtpJvt0w/jnqg+aj78UPk8MUL5PARPHDDtptHppTe/OPaUQOX5eXOXjZgzML95MOdO1HD/XtR3K4d5N7ecvT8pUtkZ/kFsvv6NTSEawx+Rwrna9kQJqlh8W42szDGjRfp2aocb9fqOlguB8t2nujgV2zXt1OVrt3mzcHscU7JkPSJjhj9AtUkOlJZooOtjltbK5rm0LIcTJbxhBBDz/mzFuzaP2lupz7b9i99bWME+WPTIfWn9h+Kz8bFD5r7Ys7s5MWpSSEvLihcRM5n98trVG8lykgaQfnIY6FIGi29A/FQ+jsBI5SijtUEEMxDs6RTUgwoEMGzbaiCGjaRHcfcHU4YPlXmzZMy0CwUsA1keJ5K3n26WmEQBcnQGvaoqW24yqcyN4IdrfzoEhkgfhCZVagorFdbLBjDfXjKGVbjNMZaHJXJOFMclcmUmDhfHeHpFJR5CFJMKfTR6FqhbBSdwt9rKk2oKE1IYAWXrbEuVheFLM3GaLa1Mqgws8vJxcwbc9pd8cnueLc7SSuecT3vL27TqUBu3YZsxcXkWy6Q6MwKZNuwZ/5LyPx6mGSaXrq565Deo5fhO34yd4nJ5B4Ut38fimUy+RN5W+r3an5eu8SNrQfFmxp4zFnyfNw+tVtrAASzlVipPbfnZuDFJpLI6Zbae1NxuRJbCBgWSGfwXHpugsEBCeLys3LVkAQ1EAt8G2F1uOhxnXXWwEk2x4K1E8atXj1u/Lrq1O7dU9N69JDPjNu8afyEdescXZ
5J79FnUnfAkA0g/ST/C4IhHDqzajQxog40Pa7OrTRU4HsoYQa2eQYr9RScKdbA8YK0pWgSWbOLzEOv7ELtqk5KHaRBReQFVFKEiitD17OVao834X3KcXDAADWAo8lQGyoJBC0b272wUEgV5tC0Xg2ofTyMV/LYHMyR5YuNauuoWImqLRzH4n3ePajZ5LbP9uhSvAsFbJw4oBQV4k2TUMTYTi1b93xm2pp5U8ZN7PM6IGiDC/FGpQziYaka424kjk8opWLjg7phWinVkRyYB4UgZaoZgHKPhEM0JICklVSxARtxLXk6rK6PyRxfq1E2XlOlRmqfV5eaID0VXdtSxaoqnxQ8rKpyu1DggO5dMzo/06P4zblLN3duv3bvkoU7S/p06Nxt8xB5TOsWT6UnNX4hb864tGF1GxdOyH954lPPPpuUy9m6efIHuH5NThrTnDRGmRrAcohNBWcyB1GiOWqJl1ayyP3ZT8mPaxVC7rL3b6TI3vdyOligrxoq8GN0MK4Ql3JgxOJPg5J15CdjqHZGzQ6O1mnJQo5Fov7oxRmX2pTtCszcu7ofBXS9i9/cvF6Kqbw4fXE30lS5Cwg6AEhtOeetqYqDQ8RM2iOUcwQBGunPTI0Oc1lizXjRgL+RX1DQ31AoDiC3/1z9e18209V4IpojdYNAcKiSj22IEw4G0HF/UO8eV9GaEsvVWoklvsNqLBMyqGDADNIL7QWWy26nKuEmcZ1MfqDtIavBZaDGE3GI4qDR9xWlSEMLYjURcGvuVhqKDNmwtdDYZ3DbF2KS672RnTsxOaFZk8BFjJ+Mt6MfeEVkWxUx1OiJhZE2sTAS+xdGst3GSAsj0Q/FH6BRFrwdD31m/kwATL9Dldw8TxRBv0XSsF2JuU+iiVOD6kmaF6OaJCEDL/mZucdWlxtfOrFx04nj5E+n3swe0H9kdv9+WVgeVfLu2Z3dt5w7t8Mwetr0Mb1HTZuSDXxfXS/Nlg5DPBwMBTDCQTQB2OMDAZTXlbfADReqP8Tr6bWK6kAAMsJlfBsATOLy8JqhvgDKFf4eFb6FAP7e23g9MsJFKYq/R+CA8ffkACjfKcf55xfx91yWGCRghEvQEm+qeU8sfU8sfw9g6EjmSbNpfF4H4mCwGqixIgNZ1QDLONa+nsXnYIrlSNZ/qs8pjaW7tz77FiYZjdqqJhk054ZV7/C4PoWJL+6JGmcdC8YzJo/O9+DPjp6/vXVye1+1Dt49Yd4fzo5qOHl67rBtf7ryzlsHcnu/gVpTr/epZjxj+E8A42DOwbbALJGB92TKuGo2gIbFPJH6rwaDr1ZAyNYL+5PFAL56WilWcrHtycovKFYyDq5aEe7903ufS1Olo95eNtzbe8yBz/5+AF2ORtlki1K6njQu8n6HZuOPAMFQeF/6SB4FwfA0r58PDJF8hQJBgdzrlqVAdoWCZJ+kKxWqUQ7iL9KwGitCaQg5ETIiNBR1J8dmoW6o2yxyDHWfRQ6Tw/ReX9QnjxzkB1Kah/qRAwASZRa/SSt1vgUnxEBjGKvKTZpyjWTeLjvGV4gFXOJKRpg4vuliVzxmq8cpJJECQbMB+yA13p+IzGgvafG8LoVnTIwOq2JzsiQFNirJbuSopSTvezV75apTjDd7e82LK7YsxVXNXsDJY3dSarJkf9r74bA5D/nJz216cAaN688YtPk7qo+Tu6N+XCEtyaEk2tAjr1YVtmU0Wgw7AeRMKjeh4GCSz30DrXmHyLUUfVQEwb4CX5N2y0TPlcAMEwmYsYlatMr8FqvZx51FWci5+t4s8usX5PuyMmRfuXUrrVUiH44/9/K5B+QSvdnB+3HR7LwixLKyNFM4wWCBJpRvEtu0mWhNo4TSSf9tJsjKkd8wxapl8PT1ojHacy7+HIONGokVEzUbv90Whe01VAdt62ehtuYgmFFHz7WyQxfm9zgx6OqRfofjm7ZcnDIxt/vJwQXjhtyVB1d8886W/KudkkauWtJzi9qs/qaYZiOeS8
5avazf0GsDRkwkH4IEvau/NcyVe9P5pUBruKhiHjkwB6B5BTs+8zieWSS9EynSDvzRMhzJXZwQxcmzjpR6E3IthHoWTpFvE8LZIBHai9P5VWk6fXH6tXS6F8YKmt8Q1YYV2iubVrB8ZoJgB1OpLioxboMujIuvjeOcnMVj11g8aRSTrg3qHJzQwwCK70nlknafr9h14ouPPpkybvzyY/88Pr00MePt8Te+9DYyvr12zZyEtiVVgV1LEv86c/kEqe/0tWYcsch2aNCIt4qK3x44MW9KP2vh4f79+wwm1V9NLz3dM3rJnHXdU7/DU/r3ypSS9xVEL1wNgOFlVlFuaAaR0JT6x8ZmT2k4fWmjCqh1PKP8ExvhdY2+6kczv6XG6RBHUZCQhULu+opcZzzD75gsUeROcnOszhf+S8m/zfxg0eJ7c6Zee+XNOS1W3O12ZuHRZ344cLLbOBxbMPz17bvm529Q7ORX8mJmiXfVK58uWv3Vgmnvrlgz6tVhLbekFrwyuupfT7fudnrX8vOfH2N2rQvsl5+Sy+itUHBCb9WoMeWNPPIwMsDXr80F6/EU4nN7Dhpq/Z+DppoHHdoNX5iFHvpe5oe35KeqIqS/ebdqzph2xEOOoXTulbVpU0V4C4yMDA2xeYmyAI5xNlk85WDJPAIolZkRZUeXyAbwYyS4dG1iXDLfeDm6K+vRXbVuvXDu4zPGZg1PgJtaMz8x3AJbNaNr8Nnc1JRheZ8VThnRbe7Yd+d+umrcoO5zR7/nyUaD23RdthuPHUz2p7Uv2EUJBN6CJmve20jOlJClrrVX16K0czn4SMzdw0dyvH3rfugBDGspl8D9GK5fiD+b8v+eQWB+hEHg5gwCT+65xxAIjFu95Qv9GQSRAAqrIrWCEybq0iiPlInYeBkwy6iYbPwW8538qJSlEu9dpXD43Vj7sJOTpUwcpA9nPa9qO0PQC0scJ5l9Aa+CFy1ixUH0iD86W/UC/ogy/laurAJWzCbDShRHPkZx3pXnAMEmxgGS0/04QHWewAEqK9MyshsB5AyekR0nit5/yXMqxbyrl4HW4hkoHnPacI2FFAn0tlrNDkhX1YsMPh+fn60kjdp0emJZ2TC04hPyLPryK/QeSZLTSSoq9/7Le5ONLw5Arsd37WFiPzIxB4xCuO+G+FlAQn2nREenr4LX+qHxtiMcrOK4e0O7wkswjSlpdGDjkZH8xgrU6LpLPQbkD/BeK8avN8lvgrf7xoSDDADB0F3XmSbqkd4gctC/GxM1SRW+Skbeni3Nzoga2gAmlZSUrVpVJo1pndfa68BvpuWl4c8BwXbSQ/4Hl8/nVYPN/vg6kUfdNosfY7BU1vvyamgYr8O3hPlS1ZzpyImOKSm+IjX5H/s2t04Na9h6iTeJFgS+R5nz3t1llo1hFV3kCZXraNHaenkcW5vXSQ/p73R3j4BsNZRp/39kX/HFs/h300J1tDBOTxwXuSU+9pjDqRsup5BxUlZa6Iyr7xzDuzbRUbvaL83JP9CPSvzGtyuuVv34x2OW4tBz+JeC+a9V3aKyj2Fc9TfGQN6pwgWvq6hBQ37iTKURFYLQ6Vbx39b6lYaJPgeEcX8sQbUJ7oXjSS0uQvTuNIs22IaK3eZkC7PlD8uTFY1kxDsaGQOrStVp28lyVEC2z90rdWYVy6x6uXJ57tjJk946h9+1r0Ph+1DKfmQustEi5mJvVb0weWX4/Wvk0s1v2O6UXf2tEei5i4FmkAzrVENKqi97G1/Bji2E3UkgRgikW73Pxs6lMYj7XC35VWnLBDVMbwx1THnVpr0ygl/xIEKfDCp96uGG5nDyY41b5eT+6qNMuIY+Byt7zocrl15p3e781GtfexONf1x0Ynb3pT8tfi+jzaVF98ivnq0FS7duW7Z4u/zUqHUOHLYUu7eSpTNHj51Ovpmx98KklxdOHT0qF7UggUc/+Mv7R+7cvv3msoj8dUzetwLgBQY7z3ZLPNst0kVFIRH0jhGkU2
vI0XbzVlS6vdUAZ6Oko/Lbe07ZVwZ/VJnlY6ArFi6b0TBMhZhYvqNW/Lv+UIoWsSsJfkE7CFKmiElhhTUMiE1hVYxG6rKlJtH7DCZ305AsliW9PeQLclb68cePdhS0TnCUfImao9Gbyde79nwcXnXtpg0NRZ1mGhFG9dMjCkOHkMXk4IAL5PSREqR8GHf3r4Cq/0p64BN0raIgV7VFx9Ah6nIrUXrrJbr9IsGFdxYUM+BB+imynGN4BcvERAhpjFozkZrCiekP195oT8JZV3dvbJ0YFtWhXZd9+/CBba0GOOKf3SdflfZVkl1HLatDxw2X5cLZu07YVwe9+xIAZn0ClWJDGjihIfSnaSG3z5OLq/g3xbpqeKjMfWnOWg7VnwEmHHFPrtxlqcwkk+JwGvX1u2b5Vx4sk5/XIhYr/31TVuYu8ls2OnXtJC/iPX1Vi5F3ozbXRt9A7fZvMr66kLzTev/PMsLIUVPIG4FQDUu1TGZZbxedk1Wzg1ZmB0XNF9v3GGSrz06EVIhRJ5tTrD9r1TcVo8OfvKrpLHNFry3p0nbdtW7UF/2Y/MOza0XBrj0Fy3ZzB3RZwOj55KOkZXsc1AlFSZWUx/qhx3T47l3Q6igNkQYMEdBTDdHtPhY6VItQcVrfHxpGoRE+ox/AToxYEmtnI7ZRQ2vAj9RXTs/ecvAc+vFmN12N5Z+Dl66+cT3E+/IlUuWQxVJLzvlTwuVVUBeyVCOvN4InUBEFP+yRiNcewNfdzqBz1cDvaBxrsfUTA7YFGqC9DU5RwldvLZVryYAdO0bKqw6tlquO61mBr2JX10mAqg+RHmiMnA6h0EgE3gUfQ7BtSNA3NGbv+lbJTL26Usr95L2qplGrWX29/FfJYAAIgGSt5o86RjQtYIw2UkdSkVnAWbdUYbVrND+A6LVs4ska/gzvBEZDmhRrkmTYsG7thp+nyt8H7d0bgkxcHuQv8M9KNQRATG2G81A4ikb0s0FGfMUq6PIy/yvJLrmklCR0Zt1WkltZrAzcG0S+R5YgQPCKfBV/oPwFQiBeDeRWnoN24RLKVANrs5jcEaZKwNc95mHuBH+wg/y4s6hnt859lL/MWb1mduc+vbuwGgP5ezROOUdHV0fFgcxZ9KMI6GgBK3wsgME1lRMwRz6E3Ya+EAg2aKJKdp67krQeyJJvGdUMI8rkD/IA2FLD8OL0KoWPjuscds8dNjwv71geOdyhZYuOHVomtlfmD575h/0vvTQooWP7Fzp1ZquZSPqgN+BpMEFzlYJJvioVwYlTlYcw+5FwU7QpwSRlslQCjfn5Nu3rQIZeTs/t3SI5tPPzQ19clPfUsEFdI+Y0Gzdo6MantWzRHamN8iU4oQ2fCj9Dh8IDogMwnwzvH8wkPVxA+G2196h5dYpsNg7GRGGOO7TJG9742eym9Runz52T6Xo6Kym66TPKvUmLbG1CM1oaJy63pVs6PgUYRsgVUjOlmrNoWjHo4EkpK7br8CZZD6MhNkwjfdJYk8+SkiQXzrxG/rVn8oW765Rqch0lkOsckyET0Z+rD/N8bTKbb9tgkExSjNRCaispmVqnk7aBLQLbBvYNzAqUqeAGoky2y0kmXmbl1CVtKT+mxvd5eXT3Li9kdev5wuDkzi1auBom/rNzdlaXzpkjOrno3QaJyYC8I+Q7ZI1hBoTxWnYq0IAyueTQL2QamGDMMMqZdEoq0uisoeDTOncqk5w0Xzta7wzUo/OwHsa1G3v3QvKdDUpUb/eEFwe27htM5dz7NNlOrNV/gABfn1GjTsCVGgH3Pq1J+E+agLM8ynZcIK+Q4qAznLkDPd9ryx5bhQuUK9pjC2Hs2LZMXrLklmi2wQoBEKsGBAaJUVEUE8pAnz/EYgZO7EtORWETMqVj2QZr13mrl8wYexkQtJAdqIsBhM/R+3Iq8EaO+r6qBsOG8ZnSUZQtO7ouWLVqwehLgKABuY9awWEIgCjf5/yn5qwrxg+TPKPI/W7z3vjD6DHldJ
7j5Jb4OJ1TPOwJYLmlPagDzy09KzvwIgPQx/eGsMf3ogxgUtSA3MSj4We+xi18NWSM6qhQa2B59Ls1qSqVmWXQjcMpDugjeizLJje7Lt3g+eOkm2359UQqtQiWYSeOk64yNJ1mnMN9FvFgUG2eUujtvCxn+LBpU0Zk5kjy4KmTMxsOnpIzBBBMgg04RjoMBparUqjpMyo1XYQZNsAaZUYhvILcQe4VOJ5MRwut6DWePVmPw7T3cbmVjMCtH1tTZGe87wfITe6sRJgQ6TDJs5I8tBIVAqJ6PEWaoMSBBIHsnfyr0tzI+eY4fGncFNYCmq1yKl6Fjys7JJqxA8CrwCpm3/iigY7P2ZhGS7E8i6LDUR8BKRrX5SBF4wQVdGxAAZuoASaYejfm5LDGvvq2I+H2aHuCXcrUUwnrspQNT+frmz+ywMnCgjaGWvpTPflFYGOxgNIZK9nJQamW8ynt3SlvLzY8pH0a0HCyR0b90e2ONdzPTvlL8o/WkD+P5i8BhbEmDam+/vEuiKfrclAH5osOmB97Uux7aQpx+lA1zls+FG6LtuFMNrEGCQzyrJPgk2ObgA1GV1AIlVc28+ax9RMoBkppRKz7vMyDoXCkp981ZhiMGu/k9T3uwIiHXVrtHI9DPjwuhV4YHscubpeSlBLbMMmNUlzK4E/o3zlylrxw5g79O4P6ocLTVdmoVfZdbPsTuUV6zpqFPx0n7V+/Zj1rpcwu9CaWvVVYrqpYs2bN+iNVD7Yw/d1FPVeJrlw0NILtqkuruncxzFqgn+oWsMb7iqJ3ovw5z2JNXpRJJECryqMBkxpr4x5EbIK+dD2qpre7QyTmIl+1i9NX7ULp0i6NOuVM4theTSdehdASGFcy6tZ57suFtgeXrnjQnPLvbIVl5ZUvnCkoWLyQRli6opijJ7H3qlJ65ggykN/JGyuK1q/EVB93V38bwHpHx0MqMKs3WB7Ir5+hh8Z81VzghqbQAlIgHY5C7cLU15ck+jeUEiIAsZ7GZqrHAV6ftDFpSq1gMifTuwLK6+Yy15TDeTame0zmGnEitiiciWyZKYbB+ETJpij28cmMpaY+E+Xrcun7TQMjbWshuSR+4QpLH7Wy57j0pcWyi9XldKY1ZAeU5HYb5cWo/6Sz09eWJXxF/jnjwBKycMWBmeTn+wlHXp9+ZgoatGTbF6hB2iHy0o408quUsaMZ+c0zNKRxdNVXgw2RjVDHTKfTKd1C90iD9efWkyj0ObvQm+wRdK+q/Bz7IzubqBcdzjNv4fr9cnKAVQ4CKCU8LqgHo3WC+m/rRQUoUs8NVsw1sAXoY3o1nPNgSsPZrkAFjFeKupluIoaU03QavaICiMsO7JY9Y3LISQ9a6kFtcl9EHrzjLTn97GnyJuo5bzaqGkmDj4sURD8+82V8wNv73HnOThrJ+xSfBxcsVu085hV1TjRNrkAH103BigcKVhxYJMy0N5wdmVWKpvY7Ojo6IVrK1FGvmH2P5lxJhx9BvxbWAslngSxQU0dv5ARxqR+ZLx/aMWOsbfbsX8kXBpX+BaHIf01YbJs85Y8HDWgeY4vjyHdvxG2NQg1RyNyl+ciAoqO3u66eyF8KMrPWygmqPXUhClzQCI6J3QXFPsfB+kSf2qAR4ghdgjq1AeWjQQNTg5gGUqau9Ri3G/TpSPZ0pCkyJpJNvfbp2ApmaqbGolw1JlasaYjhBObIGle6PifLN+BZkwZsTdkjFvYCvjkwqai10yncBNldTiM9GGKRm64UW69EFEs7dKIdZy7SP1z34Dep374r4XP3J5LlqKPsnYzXZnj3oqH7vZW4+4ASsps1FJNaFI0o+nHh1KLEZkU/o6PJI4qGovuDmMQ0AZB+pSsXAWPFDV/c0uoKeBtilkMbcqnkZxzYVK3cEoclCNB8oI936KKzMlIz62ItudxsN49Noz1S6EEq/7at+Urz9ZafP0TffeH9Hv2Wv9nuPdkcW1v8TB4kSMWKpd/MEvWQ93wIHp+PJg
4vORVQAghiqr+XI+gcomCF2BBNBBmsZkUDr2lExXqmghNl6mdVt8LntDhZUwwtoeLXv9lewdQhlM/Qwowgm6cisBOiFLPWmZIF9AbOFGGpkBR6YVXwdqOdXsypFnOKHIFXkV8O9J30I/07U0n/Tl2RpNE3yKWdFvx8jpqzgV7QUFI9XZ2+gV68H2NkQoFDfN31v6HWygnDVahTV9Rz/9o+cTsVay2DuAUAgQkSwt02O/O5HGDmtUMsK2nALNywAHWrcfUDpHhwyWpP4RbskZDxE4+UG0tWkLtHL3+ClBhvMi6PJT99cPECikST464A5hoq8SqUaJgspiLEhKmB1yizNJwiCJzB15jhUHhQNKP06wZs48/a6bMmdmpDxF63gu+jteBjalTbDa6KHDx9jf7hul8jC/ntn9TE9iEH0fObtu8uJJQVTb5D1pKlxfjO91f//AAtRfFvLJ9XjADBblwgfSMxD7yeLk/pYBAc8mM1f8MovrigiHe6GYkGww8MydHFVJpjd6it3FfGmTVR1cMg5sL4rvhgn21dJ88b3nPYO6Ctp/Qe739SF15VA7RePwFs/v9THxSepXosG4WL0v/fDiksQ1u+b9+1k1P3Refnzhr/0Ue4W1kZ7ZQy/HB5682JEyeOKKximV7ez0X6is7HAcN1QGeUWOIu7l/iMC3+rXCNgoNsYCZJqyLXhuZ6iJxTprzUYm7Pyw8eePbtQ2cOjkFNPcoo242JdGx0qH9461jr3xsBINgir0TrDK0gAELoGLVTJgTiTSe2kjwDDK36j8pZsqDXW8AYpfTwg2QHA6ToyE8O/xaSsoIeoZKWYsZdFWmknESKoD0A3ifFPJ4b7vBPotgFbrjNHsa5kGG2x1PE2Zf+99zwxzLDq3/CG+no4iFXHJb46xoaJXwu6+Z1ZD6sgq0gZfozwMFYwwDHIgPcj/qtRsazLMz/CQMcXf03DHDM/HZ8XLI/8osajn/zixr4Mb+oEWzw/0UNKkSxbkQjDrMR9504sZgsNaA528jCT8yo6YI9e8ZiA3Gg2PqAoJBanmAp7om/dyMFexfiuczeSFAit8VTDNNA4h07pold/msgsgxjH+NIYw6DyHhXtSMZuA8eiSWfKWpr1nj6GdAHRgJj8AcIqGEo9QCMeiZVXaOelG90GUVk7+FJQgdP3pu2YHTXjqOyO3cdPTCpgYsDfIZpx/7SOXtEty7DKcaX2LJBfGJydXXNr/xgA5g5UtQQQP4r589Gwtj/7hdsrsmIcjrYYYuMcnXrxmpoQeh1pviltErr+8ycvuk3baDHiJ6s6ze1dpe2b9e1/u5C/nbl41/QV7c/RRF4YxGeV9sDHG8kErL8lsl6gJPo/7fmgoD+SawHU12YANTREvJtgv8hMpESmD8Wzg52E8dM7EIAjypUbKpp8xoioER1tJ6kYj8bzcDTABTPJQ+EdlF793pQXfkGuS80jZJvFBUV6bqihkNPHSfmkU6R4UGYh3JiX0fOgzIwT0To7FTh4wrxBU/hfaOlvQ9O377NmqeSZg+ktKorUloR6lhSQk4Aqv6R9vuYqrSFSJguNEvQ7eBibw8haEM+DF8FBWXqx2EWFi6A+0yKj3jH3F/0/zV2FeBx3Ep4dN7TnYOGMzc5s8PwHEOYmZMyM1zytYFXZmbm1hSnjD6XufUXfFRmZmau69snjeRZ7WkLHyS2/N9/o9nRrDSSZpRhYA6QvIA8IHW9uUA+/bQ3G8hrr+l8IA9fnerUwQ+25OqHL2bcdVUlhci4ULW0bxaBWWwMq4eYP9lvsl9UFKcMQB/JniA0jYZkfx+6ntBNsD2AeyA30eWEbofNbILFPcAx0Lyb0An4VXAXpHFnOz90lMj4KfFfSp9oY8vYdOsTA/gPaKzeJ65Qn4AIiGt1rFy0H52aJSsoiPYabD+WPef+LNqxTkBkmmgfqnQJ3WwGxMx7A6QdG30kOy8APcCHnkHoJrgiAJ3FTXSE0AnYJNAFaegcTzvuOw
J3KkozUsnu3kz8FMNKhrU0HQCh5Qb6SKgjNF2PSXKFdj8VaJRdo5vcaQHcUa7QLwn0PpEIoRPuGk92QvcRsseU7CprOlrOP7TldLMJtt615WCuc7TKWm3xK1ijRtNBimRZNBh9JHs3AF3uQzcSugk+D0JzE11J6Hb4mE2y0BWm3LyH0AlWIrgL0tA1Qi9jtF4w0zOO1vG6p8Np/JHPTMZQdht9JHuY0HSoIZnnQ9cTugk2BXAXcAPNuwmdgB+80UroIiF7hZYdsw2jNJO1NOcQP6VESPbV0mAe2XBKoGfrkfcigEbT4f7ksEwLrbkPDEAPN9EcNJpD0+EBWGYyf0HY9oRjYUf4sJtJigS0AEBBGnoM+6FjvNQJSbIHfaINfoS+1idGCC3W+z6xD34CPZho/FK075maJXO5iva52oNNRQ+GGUhRM/O1HjeTZuiAbjKOmrHRR7IdA9ClJpoDolGPewdgmcm8mZgTcBHpxkNXCd2M0v5LppQ6JCxHxwXIPutC1+dhJD6sJbkKINRgYI8scX2+S2K5wrpPC6zYl1dY9F3Vrs0cZQr9qEDPDm8idMLdWaAL0tB9GfkulUEQLWaFspj9HEuWPMWu8vqhvlfqpyOk871PJXpQZjD6SLZ3AHqwieaAaHw6hwZgfXJ8Qdj2Ax0LG/dhN5MUCbjGe5KErhAaGaE1glnKUO7ddC+3ktx07zaZg3Lb6CPZzoSmNVQy10RzQDT2cl+bGbVNzJuJOQGXeJITulBIXqYlxzxaKMteWpYSAJ/PIskJvVmjOSR2Ina8ByCxBYK91JyN8K9o/rIGtrIpkJtWlqHfG8bIDz9InmjN6ihizctOwzQWmSMDiLkFfmANFnN/H/MrihnR1wKzuIcLNFbqSi3FSl35UASHBGx10L4h6chXYkUe84lkmPPm7GfkxUpxik/X1co1bqPkx3oLIvoPATXgDUrxT+ib0Mhq7zjQrWerQl8bRY0vWd+LDgddspqtlyW/fk+EbsU85amlmKd8JDTAJX+Wmpz2Ant/GSp+GZqD+6JqJdAZcgr+RsLyoSKNYYZ5tHGUL315rZm46M/Tl6fposbLZl45MBKUzbzMU9A5Oq95pHp2UGJzT1/f6BTnrqvqi0V2UrNjHAVb2C4Q8+/3JOP6zY1ZxXHMzNXoWhozahVK7xDi3oW4m+CZIG5ucHNAbhztkwOYmclcRMyt7K4A5grHlLoLmRW6JEDqShYsdTN8xHa1uMv+QOrmlcxiLtfMWCMNZ9ZDNHMrm2nNkko0s9h7DA/nIaiGeYh+KuOFcK74ufMbmfIrHpdxCvGP/GntvU/H346H1na+Lf+EKcGWitbOp8Xf710a3ycu4vv7Suw7olX+s5e37uC/0bpjDVzGFkCuMRMnT0Jv+QdpRrBmT/JRdBkojljNHCkm5hZ4gs20mAf6mF9BZoU+F5jFXebjdoi7la0LWFvlOubcpAu5FXoSPntrboJVN29NLcXacSVwlOX99Gl0XzbgHOsKtDpsWaxDiFR0NeTLrtfH8xX5XvJeqjGX7g99Nefme+P9+p69jPpzNLzPOwxL0eENgdShmKO+CkbCcWCfEMFXruwErRrwLgIec46SkJ3DcvAE9DBxGXbY08OEMQ32upNjnk3vrFLIYv8N7yoeqU3rU7Wdxr43iX3Gh3PXM6+X+7+W+tGX0j7VpRPaP3Z4PXV69e4OK/u6zExvH9qgktsHrMeb4TY207KZbB48923+J0u3GBrTWIEPvcVw7eO22Z6I1pCYwR6ZFyoftxNY88caH/NoYm6B79mukOtn7ijXowKZcQwt1OhTaAwRd0eNRBN3EXG3spsCpK5xDKlxDC3U6Fqw5R7RK3ePK2sSKm4QfottTLVR3y8nlk1sOOzql1DPcihKgE9shNbrtzTKqdYMRVBwXh6ZLtCLNHoQmw6ZICYfHTHF6D4AEDouMooiFe3uJDbHioJEVJ/dZoHeN/yZWhsguhxCVp8jTKHvF+hT+G/Evc
adQp7UO1MU1pI0CfTB4fuRW6ErgfvQhQb6C4GeGSkm7hZ3FZtpcUc0+jmBHhp+GbkVejmAxa3RUJjalR0T7lDcwGHDR5mCozu1lB2KT3Cxat0usbcJvjMjDsnRCoMC4kJ9tc08IN5evwpPimhZESs0EiTLhWIevQArfy3G9iXsW2yvExZ5WqROsI9ST5CdwOo0O11iTMY4sstbB6HxaO3XK7Rb675irSNytCy39rjhMPZytLbIK9AiLxSW2g9H41Ldno3tG2TtQhx5Y3S8rJqNtWKbUT0nktfnx2HccZlGF7KrfJYyGFeoJIusi4jc6jtX43fu0uPKPP3Igu1uN7arOopJLYvEv+h0QZY/FoPM0qru5CFABkTuHM4VP3fGo3KqIP65Nx4dHRWzhLujYsYwOjpVlI7ufDvK1t2/T/SI6MnRjHX3Ph19WwKWRuXkQX5iaXSfqJw8SIpvBJTmDWYfWtmjPZu1BG0clATY3thzP43lcRTxO5L9yOp9HpWi1rTGTuEaW6H3CPA2MU+fsgaj4kZ9PoN6u6DHlbn+FQu212K7kqWeZGlmeazBehMMNP0KB1rvNx/PLEnyKZogsQ7J/ZS7bzgPuNyxMSKC31BEcA18yqZBri8iqGc5tBJ/kFbtaw6m2RZt/QzSWGSOZBFzC8tn4y3mch/zK8iMaGHBzOKO+7gbiHsjWxUQx6yO/iBut5n8LvFvhE8CYgjlmT90DNafwCqGaB/1+omfErDzUOzZR+g5tI+dFRruB/C9uyR/lraPW3pcWSFRcaMdHIB2sLLHlfn0kQXb3Z+xXclST7I0QxtrsGQZpO3jACHLfzkgC9rHy8ySJIcpLNY8ROYG3csLWaNleUN1LzHrPvZyF41eTr3UqfclOtPkbiTuJrg6iJsb3ByQG2chewQwM82cWiwrNSKzij22AkiO1GxZFUBxYPte7i8S3+MSXun7SNTrPj0u4Wk8BkjeDHey8Zbkw/9A8ua1LF1yiu6OFZJcjU++UX/jwfiNmT2uzP0v2ndV7bAZ28eKnhIee3QJgMSnFoeuNfDHwtfYjvua+DwbteTtAZ6kv5IcKw58wY8F+lZ2Zfg8isyXU6y9HZ5kE6w4fr5jRrm+oIhY+56O9daLMTOK/xUxr4EuikARc0euHOfE/CAxr9mb/A1lz8uRWJJ5ADG3wNdeBIp2d/N9zK8gs0KfD8zijvm4LyXuNraQTbf2HvI5RdoUP9+D+NvgY+hrRf5ijvY39B119B0b2Szc37D2TjqKvO9w+oVd+o6N8A76NCtuiZfL8H5h6nis21kKK8E7GbZD0LqLMjYVysQsnU6uPHnjX4F15KbV7s3mPG1BZRX3PO/063uXUEvzzSqfZVe8N3HdvmrZtN9KZt1BFdGzj5wJdK7wT9ItxcUv8az05eMf3PrTacfFBn9WDta4yfHfwy5L61Da1dTsjOe8NeFNxv1UWgJenDjIV7bCdVVlURyjE/WscjOrT5/z074X1qBA77KHRleSz6XcNMmBTKFxzwu5Jys0XBa058WN+DEHih83VREzxY9jJjPvJuYEdJF9evOlLIfsU1XjxDfoFP22OJtkodUSzbCwbgO+W/bW6LKAmH0/fLdobv4LcbeyIwK4sx2Tuwu5FTozgDubGdyReuJuhptZg8U9kBvcHJAbvf90ZjHrp6NyAeKe96mqj6HtdpSI9kcx8xiO77M0+jhAbtPkk9O0RjBLXuQkgT5d6+9Tdoov6ie5R2huzOyE2j5XoxusnR16k2uLHUcWOys0IsBiY1HDYpF7D4Vm5wfMhQbY3LqXjwTMs/Jsbo0uDhoNJjfvJu4EzvEL0uQu9vaMNf9m4k/gfmSBT3YcEx2D/mCXeRb8GrCO6IPyW/s7An0B2GMuO9NbUU41VpTN7nz3VXtnyovk8hUoyVitm2tZvbUWztaSYDU1lGS5Rt9pr2goar5DapXcg6FzLDewkwF3clKr5K4G7Q7fAFsBtZJqdx5B/GRsv8l5BA
D7H5Z1YrD/2B7ewT2AtPgwafFG5wE2x9JipqlFfgayKPQCyLK0mOXzieXE3Q4XsQmWT+znmE/oC/KJ7WWOD0saV5VCnTu4tI9yOBk6YkYO6T+vATQwJk/1yX9yM2I62U6W7xScw/tjGcj+HP+MlxW474Bf/7Qq7xW95UPrsL4XlmOozatlXnUv545HVSVRWVQ09SuLPPTo76t7i4o6z3WPwnKiA2RxUcbFObnfb9GVRdXc+r/YV4z8Qw1sZxtCc1kEZkKreyBEoXP0YB3BzwFwRuOzH4bPeLt7eupktKGlPhvawE7QNrTUZ0MbYBO235razZmD+KEaPwH6yEiowH+P+Pm6nQP8H+dLiG0AeAFVyIlBAzEUA1EjafSd9F8ApbIGcr3Zw/Ja6+t6vm/3rCXJZSo7SApPEpDdC7SinPG3dkFRYg6DhDaArzJJLFdQ1LOZGNtEcjIz2RQ2QAUqt626tEoiK/ZSR5J9xMzc9zDQItDftdSC+w9Alz7xTheekvJReeozPUxQQQjjcqJ/+cSLT+XVHgI57X3miegMwgkKrPUDInsISgAAAAEAAAACAADiktOWXw889QAbCAAAAAAAxPARLgAAAADQ206a+hv91QkwCHMAAAAJAAIAAAAAAAB4AWNgZGBgz/nHw8DA6flL+p8XpwFQBAUwzgEAcBwFBXgBjZQDsCXJEoa/qsrq897atu2xbdu2bXum79iztm3btm3bu72ZEbcjTow74o+vXZWZf2ZI6U3p4f4Ck9+V8/0S5ss3jJOpDI1vM0D+oI/rQz9/N3P84xwTRnKQLKCpW87BvgxH+wNZGhqzh74/SnWlqouqq6qMar1qtqqJariqt/ueue4GjpfdqS+9WSunMDc8RqPCqQyM5fXff3FFLMO4WI0rJFUN1utRTIw3c4U/mdtkIGWi6P2mXJH8rc9uVk1nbNwJ4xDd++VyH83lUU6Pp5HGfTmosD9VolBBnmVXeZK2/lCWh/ocp/x/aE/1cDbiJ+jzjvr9FFI5jc4yi25ShS7+MSrrve7Sn9T9QIn7IrtPdlH+wNmFwCIZqO8vpZPYdynd/C3Kw5Tn8H8ZwPzwPocngRPDbxwfnmAfZXt9p7r7ieuUe8YRzNLzRdJdc30pneLNytc51H3FCvmcjrq/vkkDOoUVrAgP0FeGMi1pqPevZLz/h5lSlx7+O2qqqvqZTJL5rA9fUMvvwwqt6Wi9PzFcpLqfvlrPNkkZmicVGKZ7qV2YmP0otelg+ZM7uVQeZFHyAE3leqbKMurpvzrJ2ayK6znY/ckGGcV6acYR/niOiIu4UJ8vK1xA/0Jteri/OT/O03zdkX0cp9JHlmssS0nlJ+b7kN0cHuaKUEIaBjLD8uivYYI/gTPCo0zyf9PVd2Qq/NPVffdP+VidC5NqLHXr6K46za3hKP8y/f1bVPYP6PmNLPR9GazqoLFV0hjLWu6SNhyaLOWy/43l8kIvKiQnkspUusU3OVSO4AQZzWGxPl1iM71ezuU+aJ2H6vkiKrt/OM9ylefS/hlWs0RrdK71hnk9dlGpZC6Yv/w52c/m2S1KfWweLpY/OXtffXy98gvVq7l/N5Z5t1jmXfPnFmWeVb8Wy/2ZPap1W618TnV37tWNZT4tlvnUZDHYvzemxWXrbZHau3F/ulm8to9t0frbemyL1BxZ/2m+btM4zlHeqjxb+bXyRc3nfu6H7C/llckabgtvUmJzwnxns8L6VZpygfpuhfIKZTujn8fZYnyGs20Ny8/GlIHZ3VYPy9PGtFlj/V7KVqXsZfPHZsA2aR6yOVHMR/i/1dvqsL20+WYzxjxidcvnnM2ajWk9bz1uMVh/599uzPxflkObszbr8vrnzzbhBRqTaTB75O/mNf4PGySVPAB4ATzBAxBbWQAAwNi2bfw4ebyr7UFt27ZtY1Dbtm3btu1Rd1ksVsN/J7O2sAF7GQdxTnIecBVcwG3NncBdzT3IfcT9ySvH68E7zCf8/vzbgv8ErQ
W3haWEtYUdhOOFm4QXRRnRJbFe3EV8RCKXVJQMljyXxqVlpL2lZ6QfZMVk/WTn5Q75YPltRTlFF8UmxSMlVk5Q7lF+UdlUGVUNVX/VLNU2dVo9QX1fU1SzRPNN20W7VftWR3VTdKv1Fn1T/XqD0dDDsNHoNHY0bjE+MeVNfU37TN/M2FzNPMl81SKztLBcs1LrHOt2WwPbeHvOPt++2n7CMcQxy3HJaXa2dD5w8VwVXT1dM1zn3Xx3ZXdtd1f3ePdSj8TT1rPcG/D28j7zLfEb/S38VwMgMC2wNsgOlg+OCF4NZUObw1XDg8KPI5UiW6KmaOvogei7mCtWItY+Ni52OPY9/n+8U3xN/H78NyNmtEyBqc30ZUYyU5mTzJuELBFOkESVxJVk1xQvpUqdSWfSqzMVMquyweyA7LMcPxfKTcjdy/3IB/Pd8g8LwQItzPt7GVCBbuAiNMLecBJcCvfAy/ANEiM9ciOAKqNmqD+ahlaiA+gm+oCl2IMhroJb4gF4Ol6FD+Nb+COREQ8BpCppRbqRQWQmWUMOkdvkI5VSD8W0Kv1TEDzACAEFAADNNWTbtvltZHPItm3btm3btn22hjPeGwbmgs3gJHgEfoIEmA9Whq1gJzgUzoab4ElUAB1CN9EHFI4ycQlcH3PcB4/HB/B1/BaH4HRSjNQlG2lJ2oBy2peOp8voXnqFvqbfaRzLy0qzRkyxAWwyW8UOsjPsOnvHfrEwlslL8Cq8ARe8Hx/GJ/Hl/A5/wb/waJFLFBLlRFNhRG8xTiwRu8Ul8VqEiHRZTFaS9SSTveU4uVTukZfkPflKfpNBMlUVVuVVbdVcEdVLDVIz1Xp1TN1Rn1WUzq0r6Ja6kz5tipo6hpheZoxZavaYy+aVCTQptpCtaaHtbkfZhXaHPW+f2f82xRV2tRxyPdxoN90tduvdbnfJvXQBLsmP8Qv9Wr/TH/UX/d0sCRMZsgAAAAABAAABnACPABYAVAAFAAEAAAAAAA4AAAIAAhQABgABeAFdjjN7AwAYhN/a3evuZTAlW2x7im3+/VyM5zPvgCtynHFyfsMJ97DOT3lUtcrP9vrne/kF3zyv80teca3zRxIUidGT7zGWxahQY0KbAkNSVORHNDTp8omRX/4lBok8VtRbZuaDLz9Hf+qMJX0s/ElmS/nVpC8raVpR1WNITdM2DfUqdBlRkf0RwIsdJyHi8j8rFnNKFSE1AAAAeAFjYGYAg/9ZDCkMWAAAKh8B0QB4AdvAo72BQZthEyMfkzbjJn5GILmd38pAVVqAgUObYTujh7WeogiQuZ0pwsNCA8xiDnI2URUDsVjifG20JUEsVjMdJUl+EIutMNbNSBrEYp9YHmOlDGJx1KUHWEqBWJwhrmZq4iAWV1mCt5ksiMXdnOIHUcdzc1NXsg2IxSsiyMvJBmLx2RipywiCHLNJgIsd6FgF19pMCZdNBkKMxZs2iACJABHGkk0NIKJAhLF0E78MUCxfhrEUAOkaMm8AAAA=) format('woff'); +} + +@font-face { + font-family: 'Roboto'; + font-style: normal; + font-weight: bold; + src: + local('Roboto Medium'), + 
url(data:application/x-font-woff;charset=utf-8;base64,d09GRgABAAAAAEbcABAAAAAAfQwAAQABAAAAAAAAAAAAAAAAAAAAAAAAAABHUE9TAAABbAAABOQAAAv2MtQEeUdTVUIAAAZQAAAAQQAAAFCyIrRQT1MvMgAABpQAAABXAAAAYLorAUBjbWFwAAAG7AAAAI8AAADEj/6wZGN2dCAAAAd8AAAAMAAAADAX3wLxZnBnbQAAB6wAAAE/AAABvC/mTqtnYXNwAAAI7AAAAAwAAAAMAAgAE2dseWYAAAj4AAA2eQAAYlxNsqlBaGVhZAAAP3QAAAA0AAAANve2KKdoaGVhAAA/qAAAAB8AAAAkDRcHFmhtdHgAAD/IAAACPAAAA3CPSUvWbG9jYQAAQgQAAAG6AAABusPVqwRtYXhwAABDwAAAACAAAAAgAwkC3m5hbWUAAEPgAAAAtAAAAU4XNjG1cG9zdAAARJQAAAF3AAACF7VLITZwcmVwAABGDAAAAM8AAAEuQJ9pDngBpJUDrCVbE0ZX9znX1ti2bdu2bU/w89nm1di2bdu2jXjqfWO7V1ajUru2Otk4QCD5qIRbqUqtRoT2aj+oDynwApjhwNN34fbsPKAPobrrDjggvbggAz21cOiHFyjoKeIpwkH3sHvRve4pxWVnojPdve7MdZY7e53zrq+bzL3r5nDzuTXcfm6iJ587Wa5U/lMuekp5hHv9Ge568okijyiFQ0F8CCSITGQhK9nITh7yUkDxQhSmKMUpQSlKU4bq1KExzWlBK9rwCZ/yGZ/zBV/yNd/wLd/xM7/yG7/zB3+SyFKWs4GNbGYLh/BSnBhKkI5SJCVR5iXs3j4iZGqZyX6nKNFUsq1UsSNUldVkDdnADtNIz8Z2mmZ2geZ2llbyE7X5VH4mP5dfyC/lCNUYKUfJ0XKMHCvHq8YEOVFOkpPlLNWeLefIuXKeXKg+FsnFcolcqr6Wy1XK36SxbpUOLWzxg/tsXJoSxlcWgw9FlVPcTlLCLlHKtpAovYruU/SyIptJlH6ay0K13Upva8e/rYNal2OcjWGB/Y2XYGIoR6SyjtOOaBQhXJEQRS4qEvag51P4ktuuUEzGyjgZLxNkAD4kI1AGk1Ets6lVSjaQjI1ys9wig6iicVaV1WQN2UiOlxPkRDlJTparpIfqRNGUGFpIH8IsgQiZWm6SW6VGpMxiMlbGyXiZID1ksBk0tasa+REcgrWbjua9k1ACbC+aMyG2RGONorqd1Ey3KvsMmr9WKUGrtEHZP2iV5miVZrPN5uFQXa21FgShu/bK9V7HCz4/+M4nBcnA9ltfW25z7ZKNs3G89bp3io+47JSdtbHvkX+Ct+dcfK7+Bdtpf+h+/o1trsvLQPQzsat2+pW5F3jvS5U0lhdi522PtbA9L6zn5efGkM/y3LsGAHbD/g22Tyv213N1GtoduwmSRzWG2go7BIS/cix/ameH20SbZFOJQFgyAFto4y3STgLhds2m2LIn+dtsB9i2JxWyA9hJ9fuNXeLF+uvtiB0DCWES6wxgl+WMN6zPWQDCnu6j/sUmGs+LuV1spo2wdRZrE4gkiiiLfNTvJRtgJ9RHpMZ/WqP4FIBQVAv5Qp3L2hFe3GM7/qa/5BWxg2/Iv/NsW7UG7Bzvdb0p326+Inb0PesfeLf56q+7BkDEK/LaAQBJXldHI9X96Q6+dVSX3m8mGhvy7ZdDbXSCE0YEqcn86BTP/eQUL0oxdIZTEp3iVKIyVahGTepRnwY0RCc6LWlF61ee4rHEEU8CiYxgJKMYzRjGMp4JTGQSk5nJLGYzh7nMYynLHp34m9CZz1YO4ZKfMOEQIRxSC4fMwiWL8JBVeMkmfMgtfMkj/Mgr/CkgvBQUARQVgRQTvhQXQZQQwZQUIZQSoZQWYVQS4VQWEVQRkVQTUdQU0WjmujcQMTQUETQWSWguktJSJKOVSEprkZyvhYdv+A4ffhZefuVP3W
PRaUeiCGUEYwlnvIhkApOJYqaIZhbziGGpSMoyEcFykZRNwmGrcDgkfHDkP4WQhQ3EQBDE9pmZ+m/pK4ovGh2DLW8Y/0wRrZ3sTlWy/Ut6kPnlj7St3vzVJ3/zxZ878t9iVrSeNZdng1ty+3Z0tRvzw/zamDuNWXr9V2Q8vEZPedSbe/UNmH3D1uu4Sr5k7uHPvuMCT5oZE7a0fYJ4AWNgZGBg4GKQY9BhYHRx8wlh4GBgYQCC///BMow5memJQDEGCA8oxwKmOYBYCESDxa4xMDH4MDACoScANIcG1QAAAHgBY2BmWcj4hYGVgYF1FqsxAwOjPIRmvsiQxsTAwADEUPCAgel9AINCNJCpAOK75+enAyne/385kv5eZWDgSGLSVmBgnO/PyMDAYsW6gUEBCJkA3C8QGAB4AWNgYGACYmYgFgGSjGCahWEDkNZgUACyOBh4GeoYTjCcZPjPaMgYzHSM6RbTHQURBSkFOQUlBSsFF4UShTVKQv//A3XwAnUsAKo8BVQZBFUprCChIANUaYlQ+f/r/8f/DzEI/T/4f8L/gr///r7+++rBlgcbH2x4sPbB9Ad9D+IfaNw7DHQLkQAAN6c0ewAAKgDDAJIAmACHAGgAjACqAAAAFf5gABUEOgAVBbAAFQSNABADIQALBhgAFQAAAAB4AV2OBc4bMRCF7f4UlCoohmyFE1sRQ0WB3ZTbcDxlJlEPUOaGzvJWuBHmODlEaaFsGJ5PD0ydR7RnHM5X5PLv7/Eu40R3bt7Q4EoI+7EFfkvjkAKvSY0dJbrYKXYHJk9iJmZn781EVzy6fQ+7xcB7jfszagiwoXns2ZGRaFLqd3if6JTGro/ZDTAz8gBPAkDgg1Ljq8aeOi+wU+qZvsErK4WmRSkphY1Nz2BjpSSRxv5vjZ5//vh4qPZAYb+mEQkJQ4NmCoxmszDLS7yazVKzPP3ON//mLmf/F5p/F7BTtF3+qhd0XuVlyi/kZV56CsnSiKrzQ2N7EiVpxBSO2hpxhWOeSyinzD+J2dCsm2yX3XUj7NPIrNnRne1TSiHvwcUn9zD7XSMPkVRofnIFu2KcY8xKrdmxna1F+gexEIitAAABAAIACAAC//8AD3gBfFcFfBu5sx5pyWkuyW5iO0md15yzzboUqilQZmZmTCllZpcZjvnKTGs3x8x851duj5mZIcob2fGL3T/499uJZyWP5ht9+kYBCncDkB2SCQIoUAImdB5m0iJHkKa2GR5xRHRECzqy2aD5sCuOd4aHiEy19DKTFBWXEF1za7rXTXb8jB/ytfDCX/2+AsC4HcRUOkRuCCIkQUE0roChBGtdXAs6Fu4IqkljoU0ljDEVDBo1WZVzLpE2aCTlT3oD+xYNj90KQLwTc3ZALmyMxk7BcCmYcz0AzDmUnBLJNLmoum1y32Q6OqTQZP5CKQqKAl/UecXxy3CThM1kNWipf4OumRo2U1RTDZupqpkeNi2qmRs2bWFTUc2csGkPm0Q1s8MmVU0HT1oX9Azd64w8bsHNH5seedBm6PTEh72O9PqcSOU/E63PkT4f9DnaJ/xd+bt/9zqy+MPyD8ndrJLcfT8p20P2snH82cNeup9V0lJSBvghMLm2QDTke6AFTIsiTkKQSTHEeejkccTZeUkcYLYaFEg9nCTVvCHMrcptMCNuKI/j4tbFbbBZ/RCC8hguw/B6fH6v22a323SPoefJNqs9Ex2rrNh0r2H4/W6r3d3SJ7hnrz1//tVTe08889OcCZWVM7adf/Pcg3vOfi7Sb7ZNnb2MrBg8p7Dba2cOX7Jee6fhjy+tvHnmqCFVJb1ePn3qzYznns1497K0c1kVAEgwqfZraYv0AqSAA5qCHypgEZilRWZ5UT2PYsgNdAxLlEcNYjwKajQGgw8Es+JcAwHH5qETLIgby1WDHhpXgAyPz93SbkOsep7hjeL0eqNVIP9lTHKRzEmHdu0+dGjn7sPHunfq0LV7h47daMbhnXWvenbo0ql7x4
7dmLCSvrRSvDNw6uSa3oETJwLthg9r37v9iBHt/3lj9amTgT5rTpwMtBsxtGOfdiNGtPujmzivGwjQpvZr8WesjxPZUAYhMK1F/0qJXHRyLXWOAx0H50dxboQfxapphKtHGVUGHf1gc6PC6GkIo0NCsYGDIdUo5n9yHFb8Uz0qpyqHT8qpyOmZI4w2c1RTC1d7tc4anqdBGhkdmshNVo7GA2MF8+opFMrXcvAt55yfJNbVj8SKVhCJpBCfz+vGL5mK0yVjQRtLLX1+osicbALyzY/jkdK22by5e7c3z+x5acqYSaSkScEL3Xs8T9l3/Qc8NvUqY+SjNsv87OFG3YpXpZYUzytzDe7coy/ZsiQ4Yuzd/U688NSmCXd17sZub3v7oC2fjfhCGltW8VnjxjpZZy+dWjwpIJwormzTK79/iW/wBAAgqGEiyZKzQISGiQpWr1h4SISYUkm57FNqBQIBVkr3y8NAQ+3D36A4IWQV/JmZqJw2NT1T0Q3QAqTsQblg41NPbiqQH2Iv035kK206mGysZG3YMSs7xtrMDAyhTcjWSC4axqy4LiZRQdFdvnTNq1KX320HjVawZx6SCzc8/UKgUH6QtKPt2PKac4MDleRlMsxKBpFXpq4ZVBNmKyIxHbSvMAF1NBWyAQPW6z3nEIpfMhe2fL8kuIX8TClDEQQX6cwueUmTlNNpRPey/31uR/D0LuH14ccWkqFs//wTw9hv00gu+7IyEr8T3Cw2Ex+EZHAAktOEiPrIJO5s8hWcNqema06vU3PT02QFW/8NW0tWfSM432N9SfA9chuP5WOfkxnwHUgggyki+HwUXGw8M+65u8v3uexl0v7FyJpdaRIdRN8AAdJ5nYKQIGi4CB1U8zNNoUnPR3X1LjTb4EsQYnsMWACwJO6xk7e4bT/99GX0N7R2ndAo0jMzAOfHN02cnKkT94fv09bvr5QLAD8UpuJ51ev0rCK6SgOc3gCn19OKL9lADWokUbkS0ldBzwNNU8HdEjRXVGu0qPKIei288y5jBN59h9Cfl8yfv3jp/PmLaAn7hF0izUgO6U0cpAW7wD7NP3vy5Fk2o/rUyQeieM4C0DcRjwS+aHYSJiRhdokFkVRTjNUkvr1gffj25dM3f2ZXqEN85awnGncAgOhB3A1hQDSuhqG06+MGs+MEg0I21x4BImqiqcGk+kF0sY1xoc8M45pOL4mpgk13GVCnJSTTKXr+KSPXFgybNz6w4msqEctn537ZcSt7XKC7j1Bp9YE+E9bvXiU/S5K+eGzlJwfYcRkI9MM9smOuzWDV/+9pGmaYlnq9hLYFMjf0Fje13Izl5ntACdyDxkxTg0pcymnYlcImJDTWkK0ZcHQO3nrRBvWETcbdrEfVuA6VHa2IuhjrtnyGTjYeWzR1zsyJK7+iMpFevcjmTVuxkH176VX2rUy/Wls1d+3ilceELgtnTJs/d5R85OMrL40+Xdyiev7Ln15+Uh6/ZNmc5Qsj/CwFEIfj/jeANOgFJknoJonXwOrVZBeho02iBmkcTDlsEq4XIUsyjQo+3p84FpvOj7aLuIlTcynCvocf/qlml0xn/1WziWySrVR5nj1BOt4mXPlnKO1Lm0d5sxb3wsB8cmFylDcEVyexVFLRSeV8JAmXnJAllfClLUX8xpYRRhu0x6VoUYM5CS4WP7Qol4xGbc5ACRJ8Pr8v3WalWOW2FIsc2wbl3kECqXmlRfO5Xd/44pfPn2a/S/TjFRPnLl42d9J4O90m5J9jt9zYlFL2x6eX2A/nn5Us0xftWbf+UPvWQGEBYukSOQMu6B+nMDE0VnSsHA0kECeUCrz7ItigIy5ra0J7xQK3tGcqRoQsNh92U8w/JhEZmLktBoMe7bO7rLB0epebg632jH3uY/bP+ffYx6T9mVGBvNsWTF8WkF5wOh7Pcnz4lOJvxb4//z77iJSSLGJH3RhW06N96dRHXn5ww7qD0f3pDCC6cX9ugKIoomQEkXw9VczkxN
MLnBCUCoruT0/3oxKL7r/NJmk/p7m+evWfGuE78Vt2lRns9N13kx40+4fnAD8CjMf6NcP6ZYKOq42NrmfDJWy4Xj1P+cEsSLLxkhUklCwkOAq4oqQVOOpuIs64nGxq0JVQz7ij5o27pAixmy+WM/67KC2ZsngH++XyNfbLtqVTF/36ykt/vrFletWG9bNnbDTmjRwzc/aYUbPF4lnHCwofXvLa5cuvLXm4qMWx2c+eP//PkRkbN1TNWrWa/j1u+eJJExcvjpzFAYg3s44vfRL+t0nkS3xjCynWFA5OSSRLynVkyecXVH67ol5PpINovJ8YLr/dnoHXLW8MFxXW7i3ZMSj8I0l96SOSyi5/3XNvxxtbB5aMDNy4dsmE9UtPPfNIx46difLpNfI/7DL7kp1g37C3GjV6NCeL/NStbO2ps2c2bD4CALW10f4qDgYDNPymcCtU8R4uYw/H8WnY1+/HcReOEKGKyJDmBj5OcRwItIUhwnqhFpJw9xFg6CkFlTYXTfVqZdf/tfIcAE0d79/dG2EECYYQQBQCAgoialiVLVpbFypuAUXFWRzUvVBcrQv3nv11zxCpv9pqh6DW0Up3ta4uW6uWCra1So7/3b3wfBfR//rVcsl7+ZL73nffffs7HTFBR5D3WpvCDmUdIQb1I01myQTjoQl2MRpRl/r3hG4oVpCF83Vw+kdwei2j93o4WagRrjD/Nw7YgU6IrsgAfQGRcYCTLxUZur5kPuL/lYuuNgU1XoSa+ueEfPon+J1yrD1J7UCC+5VG3BHBHVHcEcUdlSGKO3nPyzABMdyNFOv48MTEyEXCyPp9KK85NAqGGrz6I7y65gckiwz3dgAI+xivtAIDOA3LqyxbS9V3By2ZYgWxj1KxdrMPUEhIZKJWxzrtdWqXG6lJNABmTO6TO6EgZ/pvgvDn0c+vb5z6WEvxzh24q2xeXq9VAwomDR8q2098/X7JuWGdhg3GY64xvHvgZPkLaR2wgixCI1vHWKJpbdGx3G7mDCO77O7d6Eeg+9T6IJEoXP9qW0dDeSvNbVsrcjvaUN5aC9pa0c2ZWrhMKvyhjOgmkGUyEsFkpRLVKsh0dyc2B5YQICBgIe/NBCIEGNktqHxMBISRCV+50v3qzz2L/GNX5i4ra+5/7cXJK/oKktUtLnpWmZsBf4zfwZ/i9d7NYU+YMLgiIyLr7Gi8AA/zaQ6/hPNgCdx2D3ukdEseEwlhjDkuaOZ8eO9b/PGA3n2za6oggAlxCaLjSGGvi6/CKXAHfhxvwhtxbhtLaVQsrIM2+DLywL6O+mUrO6a7GfRIcPf8hNHZAIBE7VQd8ASDAWfec3ESdiGTC5nSGsiiwiLUtMnjuEOk1kzFcI9JHoR5kz0Y+SwCsXdhGH0VKhzHp/+FzFeRz9+O7fCtL2Q4AL8u2e72RcFosiLP9wIgHmY+hxmEgGJg84/lVDxnGtpH+FMziw5T/GGx/Sx9V+NPbS1/uvSGcm/t5vGnTEK3rUG9y6yEYO1+tfpYOon3TSpILhmHhztfw/bCn2qhobiwdDW+fQN/CjstfKZ4Dj4A9dOWrFx2S7KdOD56V0TLD0s++Qptwe2eLpq+6O1Jo56aACCYSGT3GbIfW4Kuj9KLgIabbN50LDdy1C0P5CSL2U+190OAThfGG/zHkIjP1Tfgj2ByPUSwrYiu7925+a0D27bugj/KF/F1OBh6QhP0gEPxrZ/ljc/fsONrFTee28R4g67DL2Qd3IERJIOHLwGln4cGSUJdTxdyhgDi1AKL4NMYAdkLvyXzDscv4Os/X3r77Nm3JRt+Ef9xEdfgl8Wb97668d7lQzcAZDjMIDh4glxAaHWfDV1JZj/rSS1tOuz1hHmUcIAjHG+MklgeL6F9LCbnn+jtWIJ+rI8SzjpaowWoDFuPSrZKXAiAE5+ZjCY9wHwiifwfvmXsI9wJMhnuBBn3B5CRXWYPc85tcJTWCd84gtBCVOTYSOfNYvNOJnxzgfBNCMgDJG
7zSAeR2NXUTWzOuYmcC5VObFq7NxloMKYVZwDIYliIk59EGoTQ8FMi1WHihc7472r8D34dZmIIYUsBXXXbuXHroZP7iteG4MvI91jOCtgbusEO5K+347Q8e+MPb+JPbT/Gt4ZtDjppKBnYmi4D3IJyT8WxGL/UbqKsmPH2vW7kQdLd4LSKMre9bogIAvLe7u0GiyvOul0mNypGuE2h989SwFg6lJAPH3RNyQJYyWiVDLWO6XV1aHWtQn/HIrSI4vwGGfYxf74lFwHn0WS/ZYX76uoIKFu35IbrwlVyYQCxLpa96kTTx3OvJq5zuRfv5Pnw7hyqq8P1Z75rABK6Pm/yyAWS7d6fZ34//7k8f/ry4ka6xjKbeygnyTXR9CbFOhNBTIUiJtZlQleZiHWo4RgPKCvqPoxRivhqEFpQ55fr6lbBkzDE8TtKxt+gmY6VhGRb0QTHkw6dul8oThJo+wjtwodgwulWsMINaHf91LqjZPMpvyPTOJQPmKOhI8f8PFG13EQvVGfduUdgdUUc7AqJkgqDxNrKgaMhs+eobTNFT+700efrUV5FO30KebG5Uc8EWtlONUbCMKgzknfwPPyXDJ+HyXX+Mu77L9xf9q8jy7JPHHm3L/wDzYL3tomF0LEaU3YHPO9P/D/xPpFcNlR9sDfKQ0VIyDvYAkWjZCRQzAmOFb5urd0QeRq30fSlk1sX8kKZEurossFEhcHnyoTDl8u1YiS69x3B9zwSWwMExpGYerP/TAzKwmQIe+FjUFIzXI7/xHfxIdgdStAT9q2tfHHfu+/uf+kjNJB8sB+OIDdl6AFH4n34L3Twt98O4jvvXP/tEFB10nkWhzCCLoBffFVBMRMFCoqJUu7Jo9qcQ5WQhel6UVXuFrihDj12C/rgmlv4Xfj4imeeWYHfRW0c30q2f05/8nfluilTqH6k9PKT+hJ6GYEFpCu4GMj0BlevUyth7YJ7K4qXwVBu5hBhkW1IDMiHUy53QO1z+HbC7IyHkG/FrwOur4fAz/Q/oGEDoWEgCAODHkFDdtGcXDTnCMq5zh4tAL0r8H4kpavGhqLpIBNRJVTz83QOvA09Zkyd91RIxN025kVT8WEYuGH50hX4HMp1PC/ZLpyZ9q+OkeWL52TMDTFb1nadMXVp5dSnJy9Q9tJwohNfko6pURM+HNWSXLSkiJtbsnyG2TXfxfFwS0N5+AN5LeLfk+CaalbRx3ANsgkVK167jf+BYVf/gGESurZtzbKynQeu38YXb/6EX5bQb+9sXLEFzhw+vX3GF6/ZfsL4bXnqqum5OZM7pl96/eA3tz6Xly0pAhAEAyCWMjs8lpcL/M4jdosEtVlJxXhgirkUP1GHnxBHE/PJKN6sVGi0nNDoFpObCZzc5HQCL2Jc1JAPCxfF+1idfOgj3sJVDXfxqbrX12+xS7b6DrXYAcVbQnV9h+07dmwXqum83gBIErOT0h6ti1Svgj5NhjuVyQPgGCjm2X0hcx7M1kRooc4DKgqUA2AuFBx3fnH8AwW4oHC0GH+3L9MPbQCQf2TPuZTjaH4+bo9y+oEPGxL9IFfbfYkSzHAPk61ylpwjE4wKyA1qmgtMS6QQLWHPpkMRHYZTpdFCH61HFGtTIrRCc6KRuj30nxUBCMOOwggIr9bgFy/iizK+cAm/VAOXIklse+9LnYfY9m5f0XTvOnueTgCIvzM9MZCzvDVYu64bu9CRCx3brjqoeDokgUJH8jwTKfoEd3emyyzq/2glwTUEZ8DP8AVcRf5dgafIVSthCwp0tHeEojDHRXQJfU7X1YvgdY3g5QZ6cnhpZn/AMhdEigqdGRClC7oCqqHAaIAYNrITG6pOLWguHAm9sa4We0NvdANV1WdjiPTC83TuIWTuaYynHgfcdA+1JewiQCzqxW0bu7vEwj/M0IinwRkTnIPu3PsFfeeIFu4ePbpNHFi5Qdk/S/FhFCSvBTrQmuaUyJS8Jc8JFaXYgdrxKOiFF/B4uE2q/ueVI7
rPld8ykZxQQWNOCMVqtyP5KmUV0w008gZRM18weD0Rhy865yaANFUl8m6WjsuY0hgTKbXQ00qBl16S195pf0QeDCCIR+eEeMWP421XpZaC+eZCZJgOCp/C6Ndg1Ccv6GU9Ooe+cbSFuxMSGC5CQ6awjXnnQZr99YDpJtEo17b6ScLmDz5g3+srHkZm6TgQWX5HiRfY3yJDRTCIBYg47TQ3EguI536ZvstWkibUTqdDOh28yXA/rXTQWwwWY0Uhj6GeaEHmKuxAUC8ehqKsxkeh2AeEgGiwWcE2gGAboOcEjmscwUumaSUSSa34wOusF7ELa7zgtAz3Eq8yr71eb3mJxRXZXiO8iEdB7xAOrvFq8ELFtgBOj9h9A2RmQvMxZC8X7WKJUKJJLHRs5YNnVN+bw2mwVVE5gqeXj9DpX4WvvH3n+yNj8nJG/QZ1dZVHfm3u67iSu9H/o4mz+7XtE9lr3Jvbdr81YuDIvunyouMfVuDgrHnJb+Ym75vQPe1JgMAiQpME2R/4gGAwUKMtfbWiT8+rG16i0GSJiTelgngLhgXJdNQ9YHkGH0Vr6nz8lGBEwsWThZs7+Z+p67Q67/TFuukL+xWFBE/OWVgM/7mJL/fPXi37O17q1oPIn/pXqp/IwJ0zu5dvpTzUj/hQf4p91JiJYsfrtbKdZ0SWuhGqaWbNl47lZtcYt9XsR7Q4IgYJjeapCp5GttOHzr2AJNzwdk1DQ01lnYguzsh/trj4jQnZ8rYLMO5G2HUY/+Nb8tD5J7aEbT9G+S2H0FbgacuI5qslp57XMbyF+N/R1mhgQUdaSBWpROetTo9c8c9zLp0csspad8Y/bkPBiUt1Ty/oPSk09Kke82eiZlCAqd27oJx/fl3eKxuG3thi75IKv03J+uxltleGEtreEbOBH8E9T4O73nV7BAEdZeygWHtZEPGuS4LKSMkHZ1u7BNV0LmSXQgEhNzCTBJTJoqM8wQKmAuEQs4Xmn/pexTXQ+8x31xx5SF41b9TqzD6pp/YPm94MwTcmmGDMjTY3YCLEf18ukxY/3yFmb0IPYV/ZZClgXCmAIAoAdF6OAWYwABCWeJDuRnJhdH0qSmjIJwC9ubggrebyI0KSVbDRzapJptHE5dkXXqi0hT0RE+DbMSg7+8IFYXnFwgNHPT0Oi/KwAQsr6udSGg/APUU3xr/RYAxwRc2F4HpyofdwXgSSi0CKp54PAwby4oU8RZsm2CVRiSCw7A2LuzXFOgN+OFmw0ep/CuOb2f/uEZeyvvfSudZVw078UDdrQZ9JltBJPRfMIVyEYFpOnzX3jn/2U0z4B8Fh02ZMycwi3LT5QGYqPJ+c9flLAAJilot6sg+MVD+rvgO/CzihojXInKuh50RKgiIQw3zY9lR82KkJO/Nf/6hu7Nju08Lr6oQ3ew0494OjCG1eVJwcV/8rmZ7x9ToA4BJywXI2Gq2nd/VxkMEmqbVesraew1m2uISWLYqdoftXAKAGG+4J15Lf9SZPmcFJI43RQ5aP2xlEDvmoczRX56C2taxZHx+WMFn77outO4c08+lkSut+k858b8WBSjf3o5Ju4DBxDkMDQLAYADGF4KGn/K5OzFVO6h8d63FDSqznvw/zwCtFtbWF0Ae2wjuJbXEVnsORsn/9UriHpBTszLZR6c3Hx3ybjo8RkrJ1YvkvIM8geyMcjNY8h15r53Kblhej/DZRLsLIRRgz4vk9E0xtHTPjKLMLX/nyPAbzveL3TZi4LaLT85P/daRuxIg+T/mjuoL8HuNakeVY03vAyJHDxl7+0TEdrVk5dUB3bz8PRxZas2zGY3H1V8XOynMtBED0FPvQvcA9F/covAK7n5yjFyIXDlRR5xHNbRa/v/CVI3WF47pPbU1w25WT98k5xxD04txx6Yn1NQwZRT/FEVx8QBhIcsFGTR5TDerHW7bBfD1eIpnfTJ15HWHaSFrPaCZsm0jj+ZEEIx1RQ0uX/3xt6bJlS3/5ddnSurTUJS
XpGRnpi0vS01DkrZ07d+6oNd3eQXzEuj1jRo8es8e0c0xhYeEOhuMiPJLiqNWhbIk5TuCkhwdvrPxP7RPK1+Ym7ZO4S8dz11rrPvGP21jw8eXaBfN7TQwJmdhn/jz4zw18qUuGo046/0yvvrgSO178IrMzNj+W+u/NjL54pFDvxL3/o+S7qvI9XLj4kYir0pyg/hDln7/OGnSsrtMzg5ny7zEuNHR890bl3+fJJXcjkJyaRpX/weQkeCch9auXnXsPvUPw9gbdAC82VEWkd42p6g022CjAKkbAKTSA6g71itCIdMpo5y5DO8d3HxFYd8nQdvEAvwiDMEJMSXQYxM67c/J1EoDUThfOkvkjQZnGItW7xm8EFr+pGCpMEIjZPVNYTl6U6qGKF5sdbEbu6ZsFkRf7oGbEWTA1g9NYcIenqJmL9dhCq+1DQ4kTIoQaQ1Fe09EfZ12Ha/SHJYETrYxp0JWRS46euHr4+DUS+hk7dEju4GVnjt069sVtGf0gLsrNHwsjknoEtd1a+syHlevkrJHZjz2WFRi1femGg9+ulvMHPaHICnPDdbRAygRm0E/jU1M6qIUsetcINl/YRG1cN+6BaXWTL5V4PtRMUfjFrLgcVKv5wDePHu3cwTfCJzB4UPvl2154QcrE/1Q4Xs16TCfbfYy7X0aDKqBOwW8ekR8eYmcmy3iGVrU37zloTa6m9Hq4ExGrEzGqaYVQ666xb1bV5uYNmRVa9+WeQXmXfkMrHLPWFqenCM3uHQcQhAAg/EnwcAddeCnGMS/v4iESE0etEalOtqIslINICfNI5IwrKdEZK7zTXDZ+cw8v+gIvvAcnDxmCztw73ijHwwGQqsmFASzmrAiNNqUXTdsBD5j5Is07sMBWhiedOQvSvINEyw6IL27vRWtW8nRFOsLTQbp2OppBJ7ds0FkqxxAWInU0nW40G61ikvzKNfztiasI/nQCf3vtDfn7cpgEBXjvOPrRw8PRUuzs8IDobwCBBQDhJnkOT1DM8RgnXR8VT3LXeTir9kC1PZy65WPp4EuHAWSgnwjVdCSRpmgZ5h3sIQ+TJ8rMTzdSM0IQ6IjEj6EZvw7z8Y3PPsO/wXzy3hedgE87rjku0speFIbMCu0NuKdQT3A2gWGcVNVUOel5VtNwAhWxRkrug0pIkSz8KEjQdON5kfIBwU7W2GGJNN74i798E3rgjOhdZa26hbTw6qDvkh3QBs+C7tD+FLp9L3TaPr0biTgMSx4lxgBIdBYQqihv8nvkPxKbKiWFSetRqOOa0OPo0b3om6odCn2S8Da0Xk4FrUBbQMtjQCxNiWa70doHMnC1gmadmyKjnVH4eJaHZzLBpInSo4LKF0aMGjXihcoOo/oNGjx4UL9ReFviH6+dHj/dPn3i6ddqEldbXp5/evz+mNj9Y0/Pf9lC8XgT18KBD611htTiG/jSS7hWfl/BuwXBe4YG71axNj+Ctx/FmwxaWW3Xmf0Y3uYEBV+GPlspiq/VFKqg36IgZ2he3tCcgg5HX8wfMyb/xaPfUTwn7GsXvX8SxXN1Ys1rpyeShxh/+rU/EhU8ZsAl4gUhFgSARGAzECSaqly2GfjqJxb7JTdtAXRHKva7oocjFffQaU1csC0bvD4ncUj7lAGvvr5i0Na+CYNikweh37d+mdm9fbtxT/ht+SSra4eooh6Kv1KGV8JSsTPzV6IYFVUxpqc6EFC7nBb1y5oKa01zVSn1UvBKoQrC60puxFNokCJAGJio8cU4ueUaM/GkG5iObmz0uO+xEG2ivTBV0zGQjuUtm4isKF0/LLjCuoL4+MqTQ+deQsIH6z/+6PTpjz7ecVBAlxoDLNLiMy2v/xoMIz8Pq4ZtQq583/KbLVJjoAUS7QjEiSTfEwoKwH0R4JpG0O4m8ih2i8SqZC2x2gwVLZGw0AIbe4CvhX7s62otmglX0S1oJYwXSSgcyRsDZrIvf5FiotBX9REesbHSczvdf608+5OIrhcNHDTKHS
5DQ4r7b+t89KhXef7cyt/P3jxnlycULpn5e6Wy3nkNP0vZ4i1WsdoeECXPB1Uj+QLUmAe1Z6QuUik9TYxMdNpbiWa6jZVEoi+xGZvHxxGTF4mpvQ+NKXyn5+I1Kzpak+LXrVnbw1Yw0t5z/dpN1iRr7Kq19bNrXnu1pubV12ompXbJTF267tleB0YVHsreuG59Ykpq0qb1W/v8e0xBec8169G8QxhDdOgdCBqUPRQIgPg+2ft+YKqyJn7kEfy4TGIzrUFJVYm3UYi2Az3d2OQ9DfWSwWZk7Gfk61bkaqYa6VjeTHPfw5k0sJiUf6SlTvkHLegpmAW98dPQF++Go/HuOrwTFpK/YDwNGoQOaJEjofLpyps3yYBOsbV4hsivIqW/ka4F4KuM7FDZezDWLsmAvpNiK7ylYAnRsnCy/ajF+8zPP/+Ma4UW9T8LH6O/AAK5uLW4mvCqldjWs1hni+qb0t80u4c5c5Kp2tywOVWtjHexYe0dwpSuLK5Nyt4ysQO9G0Z788hYHt1kpTJXru5s1yMjTW6KvHkbzgLTyntzAgUXVw/tn9UV1/zyA/6UGLmvzp27evl7tT8P7p/VBRqv/g71JMe5ekHp0rlVt392fBLVJzwxfv7R+MdDElOegSfyVkZ1Wlnw1vFT52U4d/Lo3r2HJWW8++aw1e06rSp45dPLJ+XC5YW9Bw2K63KonUdAM9PAzkOHJxpMnn4DH+tboOyT58WfhDnOtWnFMjCwmppROrVc1VtHDH5E+YHsUon8CXNqa3HQrVviT2fOnKEZi8GkruEHqQq0JPomHsxQ+DSGLEVMI2tayYWV7juLeJ/HYkjht6hR15ZISmox1u4ZaVFaRu0GT5G8KzeKfIWeqFkgkXaTskI9ZvO6+BTO6vtwpV2H9e4ISvKfjeIgJNp27ztyZN/uchFtGjYsv7Awf9hQhzcc/OdtOBi/cvsv/OpcuAe2gZFwDy7A5/G3eBQaIG/d/eVbs974eu9mOX/gymmzn342Z+QyfAdvhROgG9TBcXg7yVknQxvui4/hKtwH2mkfAqoQfFiNWTR4i1Zf30+dUJ4tkWnqhg4hZKCKCFSz9IemXlYvs4phfaz9sp4UZQXrY/WouCJdn61HJJdyRn9Bf0NfrxfzKjz1LfSImI/6gMZ0iforzMmMaFzfDPcPI6ojrkT8EUG+BSIMEWjaQeVamHaQXodECMWEvk1lVCKbzqigkW4egmVKn1mlrzz3bPJjXZ54Acqvrl6+W98Mr7BOav5Mj5zO6KgpNjA2de7EKbOtaZlxsV7yqNK1y/Fx65Co0s5hEzLaR8coteujwAxhlrAJRIDqvy4BHaiGXRsuAQhK4EzhqBAOJNCccm25IPBZQponO/qxY5mQBWdC8TX2W86+NCTTqlwgqnzrCcygE0gGa/jMNl9j4i1y/q5Jw4MB3ibW8BtbUR1wJYDk3FqYvFlzEVmlFiTdZg1oQS+tseX+mm+F+luVNmFbdDWpvKZNSJ1FbVhCw6dGDf8qpR9+TZV+RDZ2JQ12Zdm5WoaGh7fCgK1vpianJeo8drqLWb32lHXN71NQis7xPAtTXHj6DfyW0H9ZSfKw4KCneia1zTQZTP2iErp3XZ6a+ERnpq9WSM2FfCZPDLSLievSpGuS72iLvpGa76Gyp0SwoVXSMUb/ni60d1flz1l3wugfuJ91RySF6U52ByBD08vBtwwrkQRNF1HJzqJJ27dPKtq56sk4a/fu1rgnxXcm7907efKOHZPjuz+ekNCjB5OJIxquCXWSB8HLG3SluoWL4hHF0WQXpV3ycle0l82LU6Z8eyUkI9pFl+IbvAOO/QaG1x8RsoSVJ/AMuOoEXHT3chWl41NoJ/pKOgECwRjXrgKVMm8B2ssAYLGS1Z1C34XQevFAzV5H1do2A/SQTj6CFWyqy4CkjtBXjv2wY0Yba0JqxttIfn39qp0FsxcjmI92rocg4fG27ZJSOsjj1pfO6DdzwmQZQDAKlaHrJC
cdBT7URBoJ7uUy0liItFCCjoHqA10OJE/wViD1UwLJAwXTyyl0KKNDOh1q6AfZdGhQgOkzk2+Uh2qkZFQosyiiyP6LgsUHY6PSo7KjBPKVKMJK3lHBUURmXo6qiSIC8gNyq7ytZlv6to2i3w00KAHtTk0QRY1SaRsB4+H+zNTMtPh0SqPSza93T328Z8XmFYdk9Ha31Ixe3bvNE5+O7xAZ3y5UHjV71uTE4QH+I7pOnT9nqhxtjYtJSlyi2HuzST7/cWc+n+rCdJHab3RooEO2SLP5IqULeVdBE/VE3rxFPxpBB286XCYf2cD9fD6gpQACaxQw05Q+9EK45oh0XMb1bM4NJDYczOIAOeAh4XMuDuDhEizjC328XZtzNEEopkJYjBguHVMweErLusu6mFk9U0dH1JJQyqaXZqemCM3vHR8Un9AiCKdJ5xWapAEgTGU1ia01cdQHGhUQUFxwstVCAW2vsvigBTnXsAMK1+DjyA0Kn52F0t2+7Df3of5wg9BFkVNC7H1yKXYO3FBbi/r/ocxfhDPhSQLpDTowf9pNZdipLAwgcnHCZqLWl3AyS6RiGibCNM+MQa/u1qX17NY/REjw7N937Jxn28W0ay2tUuYajLbDLUQmSqAH3wf8P9j3XHewTeC82LD4cLjlwxKYjrajki1mJudmEXuknbMeNQOQFeREsL3Eg9ojdAghA033uB7p8D89p2HW4T17jhzevffIW0MG9h8yNGfAYHHmpvfe2zR986FDmweOGzdwes748TlMR08EW4VVAjE8wGd+AOjAZ3Aqu28DQLpMdHUkOA+Gom3k9XPoD4heAt+gdwEABo5aBB/lOzKQqhhsOHBr/C75zjkhmn6Hr2pk3ykm39klnWDfOcu+840wi3XNfQsMaCf9juposO8ABEbimcIXYmfWA9YDEEl9v/NL///p/JJZl5eye6xO+zaOdYPRQ03Q6yh9ct9h40f3m45+E+CfH35xfcO0pGDS+oV2r5ubm/1sTsGkXNb6dZi0fnUcPhjuvsZsKqUnSReKIkBr9mRZ0APmAndwwEsSxWjySCqMRYWZCT+CwymMwRWmuwpTBV6BQylMM1niYUarMMfB6/ApCuMtu/yOlwozESyHecCbzEVhaCzIi4hiLe5lKuwxmAEPUFiTRGFNylEwzLdp+AsA3WDJxnLJW7iqz0c1PwiiMxRkHyHAPJdOFrsnkJ2+CSCtMNpQpw3wLrTAl2vINGVgL6LueAodcslAO+gF8o/aB0b2By0k/Dy4fqE39ngHXyJ2wRXHXB/U2vGTL9p69yac00JS2rmO4fHHcAIchxZAoOwbnEr7nghdIgDdN3PhkYZ6cp/197C1bqOsNahqXGuZ0V+F6a7CVIESZR0NsguMlwozEQxvXCPZZY0avqC9HGzOdsqcDUuUOSUJNf7eGwCghTqLCjMTJCn85abCNJwjMHMZXgpMVUOagpebrMK8T2A2MrwUmIkNgQpeDIbWKUmN/ABaKzWzTN7Nf8QpC3ZBAk4WuExYoOKscFkgWjZdoL1PAlXFArUjhGABFZcjQSP9q12LdCSuL4haW4GN1S5q05bRonZtERvxyPbt91u3WmEHa966BAW0/lU0Q23hQutxR9bChfswmit9D2yfdXTus98b95nOSSul/0CXSGA6Ofe9H5xGYYIkDx4mQYWZCT+BUylMsCtMrgpTRaT0ZArTSnaBma3CHAdfwMXsd1xhQlWYieANWEzXLoTC2EIMtpbOtYOgN/hauCEuB55ExgYQx8K/QoBG2lEismMPdGykUSsjhIkQmiHUQdgbpuCqTTAZpmzCVWzAx+BTsAvssgW/zwb8/haYiT+gcwgEn/2kP+N3EADCCRUH8B0HfPywPR/ADtWGjNqH0sBbcGh7+tJWeYlmN5XWDVbER+ND1LdjiWdqJEDiyJmhEum2EFMhEvppGjr6b0wftKk0bwztSih47cn+m5b0GVjfM8wiwzux07vtexdV+ptk7BOZH9
/Y59G69YaLA26XKW0KJAp5acD3i/Dd7BWxUBjWpt1vB1OLomD9wRYtfjvE+IfVsbO1SHLyhlnZs0bJna2XCmNRYWbCT5U96+cK012FqSJ6dCiDkV1gvFSYieBNZc8yGJsfkZSqvGf10GzOFOec65Q5vSSFrwECmwjMQtaXZQLZfBU+Z5raIfBwRhrdPegOp64d5OpAbO6urpuPVWlfoQU7Rh+ntQ9X/FULvfGt2r/q6v5aQf6TbPjXusqqWvwleReOA1eNHb+G8e0z5Fl3ysEgEgzSSBxfrhrFtbVGLzUaB/4avgrxkZh7SZqqXZrrGt1dky8wcQVPccQMbvRf4Nzav069+t1M2PX8sf6vRHRsOy8tLx+/t3BE+vApYrcrd//9xrSzaV3xTysrKkKDjgW0yeneC5rWD/y8Z9+CTcuUtWB1v9IVshZdnbpkMQika9FODmBrocJcVmFmwiQQQGFiXWBkyQkjg6oUM4Vor1MgwH0YiwpzPC2K/coDMNJpFWaifwvKRR0oDD1eK6ZaO19vFadj4DMwjULGyxQy3mBLdsoZAcQ1XJeXin1Ae/AY6AJOc9XNmkO9Hl3qLLBSZ3s6CKYrlh5bUZJelk4rntOJ3shOH5GOpim3iitq0hvIC1GeTRc624PYiy2dO6GGapk2fLdtrOaSRKut1bTztDNfH/rwCB5LcPB1o5p4HmwsIRWvLj2Tlfz15opjt375NG9Q3qRrSK49Oem1pPSXx3x9wzFEEFevGrWw35OPnaqflrWh7ZmiucOFjPHTPRA8OM40NKfHqAM79rzeffi4YZnN5TWHumSkZ+G7P62Rl+xv3/6FmF6Hnux4ZFS3zGz0S9kMqdWEUrbG/XAqrU0ma/e4065JY3YNq6uVvif3n3Dy4hLQgnJIiFPfqTBXVJiZsLPCr2EuMLLMYBgvpvlTiFCdAgFUGOmMCjMxMIhyT2sKY2ttsFkUPmugzbeljB8/cto9Y4HE7B7VXgFlAKAC6ZQTRgYzW4hai4bZT4cJTJ70B4NR7B4LQAxKp9o9+wnMTOmgCjMRO4AMvBmMq92TQvi/j3QTWAhX7wSkxJivPAgOIiaNV5BOqc637/Uil4AOJq8ges8Um2EONsWa0k3ZphGmKaYSU5lpr+kt0wcmT+IaBpkoTEis3dcUwvReiIm+AF/K+zQS1lbD1AavtvRDczBLGepcm9r8CAv6Aqf3TjUjCTpLkYnxEVSi0fwbDceQK2fh/uJRk/CX3/+IL0GfSwO3xon6/hn4dp/vLL0jew7Y1uVsH9x8wfaw9eMWbtwq6SfgG/86ewcfhwHVP0BzepyUvztlS9E82aeVvsqY1X560b3U6n1LO2RUPDvnTbpOrL6QyZ9+ivwZyuSPWSeq66TU/TH+6u/kwT0Kf7WWFSgV5rIKMxMOVORhpAuMLDEYxoNDmTyMeGAu2aLCHB/O8Il8EJ/TKszEeCYP21AYWxuDLZxxhEDwfFVMFA+ynI8nSOXPaFOsVLGaNeOowQRAT5aiXs9U2vvvxgd1w6k1S/7ExHq9cBsvpqly9PiXH1y8d/simY/gNZPUHh7m7Cq+1oQZWa52lcDbVa14u4pdqXaVkTCMakpRHlKNLOtD7Koc6H41fnTME+vGDx+F//6lw7CoJ9aNHT2+rmUrGUb4x7cqWQDrA/1lfNm3fUBJCYqshfFGnw1f9LhWZrqNP/FutuFs9z+29FnUBqIhnl4nd3ad2RY67G5uJ/Yoa8FquthaDHHyxm5FFphkN7ZiKswpFWYmHACYNPB3hfmDwTDeGIIYhI5BaOc6qMJMjGOSgMHY/Gk9gfJbrN6HzZfrnM9fmS9QNjXaUitJLDDtv+tj+U/ViTbdx5Km1InWdVozvOkyUd07jje6dOfrRNXnY3TIVehwl9EhUEeejgZ0zYz/IZXBrBaEr6XWN11LXUpLxBU5WthwXdeDnYMVTmxOEgvlDxhRQ6KPbjD35jxE+wgj9SppROAseUfz8768ojfzRc
P+XEUJX0Nssaj9zdSxUE/ckNRiVpqq0/WoX5y7OAvXEx8oEwrd1mYLs+lJHPRUjnsF1sKO8YUd9x6o8PCEPaEH7ADdYS+9eyUurMRWX6LykmS3Tyrxp1WfAra3CU0QsZdCQQdiMc3WnJb1yMYQ/ribBGCk+iCBGEoJZQkoj3tmwB8aF1FNlUqM5k7HatW4UVpgmjZoIBeSVG0aadjiM5mZJxb9iv8mEmHxycyMD6fxLTL3xs0vLSkpWVyyQLjT2C0zetjwUTCuzkSkQuHw4YXaphkUuff4CVJ7ffLkTjhG7Z/ZSfLsKcS3dAOhLMuO+Cz7QW9dsC5WJ+Qpx3GSbIOORGytQkpl2dqPoFuZWO+/alXgHwoflooDUIR0geXNOrL8lKCWDKcL2c7yXe/7kWAiAhovms6OUeKVzhs6eM6cwUPnTU6OjkpKiopOlvwGFBcPGFhUNDC6c1JMTDKEyUpPgfi10E/6GxhBAmAlU9qZ3KtpqMtLe8ugXngprh1kk6s1XQwHod/sYd1fsEYmLJk1LOlAXESSVD1i+dDMmLD8VUMz2jM59xIqEn8WOhJL8KvzIMeaweJIqEhy3rOBsWMzKH5dhL/hcCLDJGDQ1GL6siZQo1UwhXV5blbKRfEALMQ73iPw3YQ7MF8Lz/Yqg4fKCaf59AvSIPwczK0CgM2B78Lh0Is/C5WIi+E7F6Zc9MVXoTv0IPhRXNDz5LcjwEkmc0/CJwEARpceDp3q7xJc0FsM/hSDPwX7MXjed/RQbbsuDWa0HYYCiXCDO8WEfRbO0JbYCAc8NzXla9iNjk/iT2HkT+fIGHsBKP4pbEBdhTvAi3CmXfAQol0j+c/MLhw7Z/bYwjmCJX/O7BG9R86YOYLmJ8FWZBUOApl8L4Bsa39ahRoG46EVpvz9Er4CQ15CEXgaXG6Ey+k8Awh8CxVeovBGaIJhRuEeDMFXXvr7b+EgnmvEc2EZXEfgY0CRME2KBAJ9KhDLjqJLjITmV+lhzUXsEGb2/OmogzCIyGQP0Ayk8/H8+31HdllydzbjeAoaycJYVSmq9XIelUkrnSKhVfCJFNCXpaVV2CrCMyer5NvC7G0221Q0w3EAPonw2/SZehK/4AqZOxqUgvsh/wfKsaIjSTlWbDQ7EI2zs/T8YQOAnupMYMhR53bvSHqcDhlskbyrZ6omd+jR5y1cjWeLSa1CZ3KQGGTsLw5om+os9J+wC8ftWPbY1DjfpHlpN/F3G8h/MOxmyvQs34RpSUu3wzM4Dp6BJ9HUV318jnkbYIuPUOWiSv1x2NrgfcJgPFDcrHKRwj97UJHwvdDx4Wf9Ct/T/DYqqlLWyx8A0cz6CFuAyY/qJNS2HjWpPfzJhf9/oseQqvkjL7xw9ewTa3PD02Y/XjT2q6/QuLo60muYW/llcMuTphYFBbmk17DRDugNgBAuWAjPGUA3Dc81d00lIHeRsh2KLYfajLzBeVarnnGeN8950Gz1idShA8XFH+DRHvDFD/EY4bysh6Hr16+fjoKwLEET8mW0H9XwJ7outANRYIsmz95cSznFHnsw726PCmymSZE7s+FqplxJkudpE+aPzpTbHw+GeeStNg3/n82ew3OPzp4zmQTQV4QegaCPpmai+QNnHf+vqyMs/4fqiIfURgwGAG4hOEogRiPTmzd1zjOZnmuXVFO4LIGr5mQsak5mJpzXmKNT8jb/Bbts07oAAAB4AWNgZGAAYen931bF89t8ZZDkYACBIx8E9UD0OZEzun+E/l7lLOKoBHI5GZhAogBOMQvyeAFjYGRg4Ej6e5WBgdPoj9B/I44FQBFUcAcAiWcGPQB4AW2RUxidTQwG52Szv22ztm3btm3btm3btm3bvqvd03y1LuaZrPGGngCA+RkSkWEyhHR6jhTag4r+DBX8n6QKFSOdLKaNrOBb15rftSEZQrtIJGPILCkY6jIjNr+KMd/IZ+QxkhjtjAZGRqNsMCYRGSr/UFW/JbX2oq9Go427QIyP/y
Wbj8I3/h9G+5+o5tMxWscbE6xdmVp+DqMlJzO1Bclt3mgtwOiPxcbmGI2o7KObO5lzmD+huI7lb9+ATv4Hvv74B6KY4+kdvtQ1FJG4dHCF+dH8hatOQjcCJwPszsXs7l1oo/HJa86vKSgqu4lmdQGjpXxPH/k1PEfj0DaoP7ptc7vQKphrtAksG81RySdb+NnazfUr/vEPiGj+1/jGKCizSSLCLPPvPi8Nn/39X/TWlnbvheT1IympZ/gt9Igueo8S+hcTPspAYdeXBu4c5bQmrYO/f9Z3nM7uM1prdkq7stRw5Sknc2miy+mn35BK0jFGvqGmJLS5k2ls66t99AVzPqpkHKWehigT/PuH+Lhj+E6QRZDDSyRneH+Qg/moscqXIcLLDN5FM5DTN7facniTZzlsY4Bepkvw5x/io7UkeJaDZfAm8lt4kfxGb/MKY6wuI8UbGbxNX9JrV7Pl8BZBDoPpFjjY6+MFVPw4OfndJYbLPNq5I7TxnZn8UVtmhEaSzsgYWK4ZN8gox83b6SL1qCFVKeBGENNNJbXmJLu2Z5RO4RfXnZyuEuVcQZsTn8LB3z0FW2/CPAAAAAAAAAAAAAAALABaANQBSgHaAo4CqgLUAv4DLgNUA2gDgAOaA7IEAgQuBIQFAgVKBbAGGgZQBsgHMAdAB1AHgAeuB94IOgjuCTgJpgn8Cj4KhgrCCygLggueC9QMHgxCDKYM9A1GDYwN6A5MDrIO3g8aD1IPuhAGEEQQfhCkELwQ4BECER4RWBHiEkASkBLuE1IToBQUFFoUhhTKFRIVLhWaFeAWMhaQFuwXLBewGAAYRBh+GOIZPBmSGcwaEBooGmwashqyGtobRBuqHA4ccByaHT4dYB30Ho4emh60HrwfZh98H8ggCiBoIQYhQCGQIboh0CIGIjwihiKSIqwixiLgIzgjSiNcI24jgCOWI6wkIiQuJEAkUiRoJHokjCSeJLQlIiU0JUYlWCVqJXwlkiXEJkImVCZmJngmjiagJu4nVCdmJ3gniiecJ7AnxiiOKJoorCi+KNAo5Cj2KQgpGikwKcop3CnuKgAqEiokKjgqcCrqKvwrDisgKzQrRiukK7gr1CxeLPItGC1YLZQtni2oLcAt2i3uLgYuHi4+Llouci6KLp4u3C9eL3Yv2DAcMKQw9jEcMS4AAAABAAAA3ACXABYAXwAFAAEAAAAAAA4AAAIAAeYAAwABeAF9zANyI2AYBuBnt+YBMsqwjkfpsLY9qmL7Bj1Hb1pbP7+X6HOmy7/uAf8EeJn/GxV4mbvEjL/M3R88Pabfsr0Cbl7mUQdu7am4VNFUEbQp5VpOS8melIyWogt1yyoqMopSkn+kkmIiouKOpNQ15FSUBUWFREWe1ISoWcE378e+mU99WU1NVUlhYZ2nHXKh6sKVrJSQirqMsKKcKyllDSkNYRtWzVu0Zd+iGTEhkXtU0y0IeAFswQOWQgEAAMDZv7Zt27ZtZddTZ+4udYFmBEC5qKCaEjWBQK069Ro0atKsRas27Tp06tKtR68+/QYMGjJsxKgx4yZMmjJtxqw58xYsWrJsxao16zZs2rJtx649+w4cOnLsxKkz5y5cunLtxq079x48evLsxas37z58+vLtx68//0LCIqJi4hKSUtIyshWC4GErEAAAAOAs/3NtI+tluy7Ztm3zZZ6z69yMBuVixBqU50icNMkK1ap48kySXdGy3biVKl+CcYeuFalz786DMo1mTWvy2hsZ3po3Y86yBYuWHHtvzYpVzT64kmnTug0fnTqX6LNPvvjmq+9K/PDLT7/98c9f/wU4EShYkBBhQvUoFSFcpChnLvTZ0qLVtgM72rTr0m1Ch06T4g0ZNvDk+ZMXLo08efk4RnZGDkZOhlQWv1AfH/bSvEwDA0cXEG1kYG7C4lpalM+Rll9apFdcWsBZklGUmgpisZeU54Pp/DwwHwBPQXTqAHgBLc4lXMVQFIDxe5+/Ke4uCXd3KLhLWsWdhv
WynugFl7ieRu+dnsb5flD+V44+W03Pqkm96nSsSX3pwfbG8hyVafqKLY53NhRyi8/1/P8l1md6//6SRzsznWXcUiuTXQ3F3NJTfU3V3NRrJp2WrjUzN3sl06/thr54PYV7+IYaQ1++jlly8+AO2iz5W4IT8OEJIqi29NXrGHhwB65DLfxAtSN5HvgQQgRjjiSfQJDDoBz5e4AA3BwJtOVAHgtBBGGeRNsK5DYGd8IvM61XFAA=) format('woff'), +} + +@font-face { + font-family: 'Roboto'; + font-style: normal; + font-weight: 200; + src: + local('Roboto Light'), + url(data:application/x-font-woff;charset=utf-8;base64,d09GRgABAAAAAEScABMAAAAAdFQAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAABGRlRNAAABqAAAABwAAAAcXzC5yUdERUYAAAHEAAAAHgAAACAAzgAER1BPUwAAAeQAAAVxAAANIkezYOlHU1VCAAAHWAAAACwAAAAwuP+4/k9TLzIAAAeEAAAAVgAAAGC3ouDrY21hcAAAB9wAAAG+AAACioYHy/VjdnQgAAAJnAAAADQAAAA0CnAOGGZwZ20AAAnQAAABsQAAAmVTtC+nZ2FzcAAAC4QAAAAIAAAACAAAABBnbHlmAAALjAAAMaIAAFTUMXgLR2hlYWQAAD0wAAAAMQAAADYBsFYkaGhlYQAAPWQAAAAfAAAAJA7cBhlobXR4AAA9hAAAAeEAAAKEbjk+b2xvY2EAAD9oAAABNgAAAUQwY0cibWF4cAAAQKAAAAAgAAAAIAG+AZluYW1lAABAwAAAAZAAAANoT6qDDHBvc3QAAEJQAAABjAAAAktoPRGfcHJlcAAAQ9wAAAC2AAABI0qzIoZ3ZWJmAABElAAAAAYAAAAGVU1R3QAAAAEAAAAAzD2izwAAAADE8BEuAAAAAM4DBct42mNgZGBg4ANiCQYQYGJgBMIFQMwC5jEAAAsqANMAAHjapZZ5bNRFFMff79dtd7u03UNsORWwKYhWGwFLsRBiGuSKkdIDsBg0kRCVGq6GcpSEFINKghzlMDFBVBITNRpDJEGCBlBBRSEQIQYJyLHd/pA78a99fn6zy3ZbykJxXr7zm3nz5s2b7xy/EUtE/FIiY8SuGDe5SvLeeHlhvfQRD3pRFbc9tWy9/ur8evG5JQOP2Hxt8ds7xLJrjO1AmYxUyiyZLQtlpayRmOWx/FbQGmSVWM9aVdZs6z1rk/WZFbU9dtgutIeCsVivND1dsWSG9JAMKZOeMkrCUi756MI6AN0g3Se1ellm6GlqOXpBxuoNmYXGlgn6D/qo9JOA5ksIFOoBKY79K6V4qtC/ZJy2yXNgPJgIKkEVqMbPNHpO14jUgXr6LcK+gbbFoBEsoX0pWE55Bd8W/G8BW9WNboZ+b/KPyWslDy5K9biU6TkZpY6U6ymiLdUv0Vyi9jvt1boT+x9lTmyXzNUhaHKIcqyEaDkLfw8YTQBNDpo2NHmsVjZtrl2u/kZLmDlHaT0BJ1HTZ45+gbdfTSznJVOK4WQkWAAWgiYQQB/EVzAxYhheIvASgZcIvETgJGK8NfDdgN1GsAlsBllYO1g7WDtYO1g7WDrMcAK+a2UA6xci+kp0i0EjWA4s2nMZO6DNrE4zDDbDYDMMNptIHSJ1iNQhUodI3R4DafGzG8JSKEUyRB6VJ+RJGSbDZQSrWsb+KJfR7OAJ8rxUM/Z0xq6Tl6Re3iTyjUS9WezsQ+7e9L7j24G//uznFl2th/WAOrqPNelG0hq5z6Srk6Ub4Kau0Mv6qe7W7ZQPsxIhPcgeX3sPns6DCDjYSX/9rj3/7ka8bbeNGQXHE/UzyZb3Naqtt/W+FAepZ1J3mVOWPoW7ipYzFE8hSiE3Erfcabyo/I+kF7TVzPBMiq
6VU3Wr/FGy9F2y1MD5aLfeG7ukh3SKztOQHtOldxmvgTW/3uWKBeLrqifdSuxbPeNypiOTPb/StfqBbgBrYCOIKkifoH6ou3S//oxFky4jLzLWvTSoV/RrU96pR/UY36Mdx9VzerNDbA+b/M8UzXE97TKTYCcvdY079Fxl8v2duY3vJb3Y3lvbjK+QWdMjScujKb226ze6V0+AH9gHId3G3ghxPk5yZs+m2BVzo4j+otuYZ3wX5ibGa4uP3R5tYufcaU32pGm7er+ninU2ffVaVz47Mt+tHXstTVvae0Cv3PeYTjqG4n5v927ukWDyTnDucuZXdXEerpqzcsc10D9M3nKnmNPFnZ6n7nOlY/RxrdBhYDA7yovKyx/Mq5N0vr6l67EIaA4ne4k5369QP6Kvpd4r8RRjZ+hP4PPkPrp4i832qOJ/AP1E1+ke7uE9nPDWJJ+Jrx4Cu92zEZtr6m93h6H2O7CDtjENA6eSpZOdzwL/84C8m3g93kuyeVN44C/L1LyIT7J5D3gNqz0SVjloc7lZuAc7/RfC3NHu/+dBU8tP6vORAnN/90poeoM+5H3vIaYsM3omo/oYwfVdgLgpk6+vWxvGSuQWfkuMV4v5+Q1TAaIMIr2ZVYhyIWLzCipijKGIT4qRPvIU4uNFNJz8aaQvL6NSeBqJ+HkjlcHUKCRHnkEKeDGVw9dopJdUIBkyTsbD80TEIy/IFKKoRLJkKpIpVYhHahCvTEPyeGVNJ7oXkX68tuooz0SCvLrqiXCezCeSBbz//bIIyZAGxCOLpRGfS2QpHpYhPlmOZEkT4pcVSJ6sk/XM1325WdKC5JsXnCVbZCtlG75djiSFI9uwkwE37hv6Md6G2cx+NJYVzKs3MxtPlJOQ/sxtqjzEO7FaBpk5PMIMZtKznvgGm/hKiKsJPjcw3oj/AIgWgIQAAAB42mNgZGBg4GLQYdBjYHJx8wlh4MtJLMljkGBgAYoz/P8PJBAsIAAAnsoHa3jaY2BmvsGow8DKwMI6i9WYgYFRHkIzX2RIY2JgYABhCHjAwPQ/gEEhGshUAPHd8/PTgRTvAwa2tH9pDAwcSUzBCgyM8/0ZGRhYrFg3gNUxAQCExA4aAAB42mNgYGBmgGAZBkYgycDYAuQxgvksjBlAOozBgYGVQQzI4mWoY1jAsJhhKcNKhtUM6xi2MOxg2M1wkOEkw1mGywzXGG4x3GF4yPCS4S3DZ4ZvDL8Y/jAGMhYyHWO6xXRHgUtBREFKQU5BTUFfwUohXmGNotIDhv//QTYCzVUAmrsIaO4KoLlriTA3gLEAai6DgoCChIIM2FxLJHMZ/3/9//j/of8H/x/4v+//3v97/m//v+X/pv9r/y/7v/j/vP9z/s/8P+P/lP+9/7v+t/5v/t/wv/6/zn++v7v+Lv+77EHzg7oH1Q+qHhQ/yH6Q9MDu/qf7tQoLIOFDC8DIxgA3nJEJSDChKwBGEQsrGzsHJxc3Dy8fv4CgkLCIqJi4hKSUtIysnLyCopKyiqqauoamlraOrp6+gaGRsYmpmbmFpZW1ja2dvYOjk7OLq5u7h6eXt4+vn39AYFBwSGhYeERkVHRMbFx8QiLIlnyGopJSiIVlQFwOYlQwMFQyVDEwVDMwJKeABLLS52enQZ2ViumVjNyZSWDGxEnTpk+eAmbOmz0HRE2dASTyGBgKgFQhEBcDcUMTkGjMARIAqVuf0QAAAAAEOgWvAGYAqABiAGUAZwBoAGkAagBrAHUApABcAHgAZQBsAHIAeAB8AHAAegBaAEQFEXjaXVG7TltBEN0NDwOBxNggOdoUs5mQxnuhBQnE1Y1iZDuF5QhpN3KRi3EBH0CBRA3arxmgoaRImwYhF0h8Qj4hEjNriKI0Ozuzc86ZM0vKkap36WvPU+ckkMLdBs02/U5ItbMA96Tr642MtIMHWmxm9Mp1+/4LBpvRlDtqAOU9bykPGU07gVq0p/7R/AqG+/wf8zsYtDTT9NQ6CekhBOabcU
uD7xnNussP+oLV4WIwMKSYpuIuP6ZS/rc052rLsLWR0byDMxH5yTRAU2ttBJr+1CHV83EUS5DLprE2mJiy/iQTwYXJdFVTtcz42sFdsrPoYIMqzYEH2MNWeQweDg8mFNK3JMosDRH2YqvECBGTHAo55dzJ/qRA+UgSxrxJSjvjhrUGxpHXwKA2T7P/PJtNbW8dwvhZHMF3vxlLOvjIhtoYEWI7YimACURCRlX5hhrPvSwG5FL7z0CUgOXxj3+dCLTu2EQ8l7V1DjFWCHp+29zyy4q7VrnOi0J3b6pqqNIpzftezr7HA54eC8NBY8Gbz/v+SoH6PCyuNGgOBEN6N3r/orXqiKu8Fz6yJ9O/sVoAAAAAAQAB//8AD3jarXwHfBRl+v/7TtuWLbMlm54smwIJJLBLCKGJCOqJgIp6NBEiiUgNiCb0IgiIFU9FkKCABKXNbAIqcoAUC3Y9I6ioh5yaE8RT9CeQHf7P885sCgS4/+/zE7OZzO7O+z79+5QZwpG+hHBjxNsIT0wkX6WkoEfEJCScDKmS+FWPCM/BIVF5PC3i6YhJSmzoEaF4PiwH5KyAHOjLZWiZdIU2Vrzt7Ka+wvsELkmqCKHtRYVdt4BE4FyeSoX6iMiRPKqYCxShTiEh1eSsV7iQaqF5RBWp7FaE4o6dwoVhHy+H5apHH6iorqZf85805OM15wrd6edSAhGJjfSCa1KSp0jhWk4gFiFPMYeoEleg0DpVcNXXii6SBCcFl2qieaoVztjYGdUOS3XslExxjbAHX+fyZYFqoTQgdCfnvz6snaPcl/AK611DiLAGaEgm6fRmEkkCGiK++MRwOBwxARkRsy0OjmsJTTLZ82o4OSU10x9WiaO+xutPSM70h2pFgb3Fu9LS8S1RrK+RLFY7vEWVjAIlqU5NdNUrifomza76iMlszavpbRIsQI9LjYezPjjri8ezPg+c9blUG5yNc9WrAZqndEna2etfp3OJL8+6s9e3p514oCS5argkkwfWZa8SvsIiNZZEMxzEu2qs8TYPXqrG7ouDD7jYq8xevfiKn/Gzz8C3Eti34JrJseukxK6Tip+pSYt9Mh3P871dHI9EumTkQkpqWnr+Bf8pvZNABJ7CgCcAP2Eef8K+IB/wBfigB3+K4K1rqGuwVk/bDRoziHaDl3/9z2ByXjs1YMwA7S14uY92G6y9SVfeQV8bRZ/X2M8o7bo7tDK6En/gPKggqTzfkY9Kj5AO5CkSyQMJKm1BDub6SJ6IPM3LteRFZBCm4g2rKZb6iJyCp2W3BbQ0v0Bx1KnpoKIko05WOXe9ku5SZWB7bkj1guDahhSvSzXDicSQmuWsV/3uerUAxCOngyrHFSteucYmprTJ9BcrZrcSLCZqiii7txPq8CdkwVngQlHYGx8OdSnsnJ2TTws7dykClUyjThrsnB1sI/m88f406vNKJl+wMJ9W8uWHHvvblsd3fPT225vLtu3l+PLnH//bs0ve+PCtj5TS7afoc5L63KqKSQ9f3WfnS2vfcxw65Pr+gLhi96r7py7r3e+V6g1vOXb/3fYxWNCk8z+JC8WDxI7aDdzpTh7S+aN2ctRHBOCImuCor+2amSfY89SucCjb2KHsqKdKjwKF1KkOYIHDpXp13UWFzYDDfDjMd6md4bAtaGlP+O11yO4am5ACRlCsds6HP1Iz89LgD6J27SS71ZT04mI1QYaj1LRiZArwIRyKT6VeKdgmu4gxqCfVGeKhfpp1mfcnrZ43d/Vzc+ZXjbprxNDRJcOG3VXLvXVDtJjOgTeqVsMbo0v0N0qE/gPmbt06d8CcLVvmDJk1a8iAIXPmDGmQhakdzz26euCcrVvnDIy9NXD4jJnDCHiz4ed/El4DvrUhHUlPUkEiKegVMpBx2VJ9xIqM684Di3oxFgVBeYK6eXeCw04utSsc2kGT7C7VB4fxcr16FfxGPmy3ChnZHWRkks8OTHInprZjTOqeLbt3EJM9MbVDZ11rOne5ij
J1ATaAdjgp7QUeDdTEbwrmOGgjV4rgUzkmB/WAHhXBRxiPhj+x1HnzwMiqx18adtsa+lynLpP+0u81bumM2w7d9/Hpyk1rR2y7VisRTVzBtEEPXXW12q3TPSPLJtN7K98YYxvz4l+rNq+dOWzB1TO09OuUMfM+/+th8ZGBt9ZFZlVffw09JpqEzJEruEN9Hr1pYYeSroPGLgAbnCb0IceY387WvbbhsqkiXeCvkVGN3nmauSxb6EOt7+3XThK05Ye1TtxEaSiRiYdQxc0YbAWr87AveQpdpCidSpzsc7mBDdnkYRq/SUp64vDhJ5KkLdoJrqeTjud6l9C/3B39Vdvu1bZHfx1/7RiuM17brXWivza/Nl+n2puu3cUtF7q4nKJwPIHLE1PQ/fiRow8nSS/TeO3EZkmrKOPc9EYv/QvnK7u2JLpXe8qpPRx9bwzbdyo3m78B4oiD3EMgpIKzoQVUcbL9cyB7EczExZy5kp1EIQjnv0NUQvPfQfd+ovP+TPTqDoW4FMdeQaEuhdvLqZwjP58qDnSmVBU58Dc20BQeY6jE/IrIh/ksv+gx2WiOJzWD3iiMNdO+Aa3mm9vq3rvtiHBr6Uw6VVs2t/Re7YuraCft4560PWH77U+WC52EHRBlbyEKKVBMYZXa6hUxBMJD70is4DQpwUPKo6OEsGutY3EcdFwIRSxWfM9igo9ZLXhoJZZY5AW3D6EdXL0clPvTyHT6utZvOjetnH6i5ZdrafSYvofBmkadZBfoTBbuATXG2kxjQDJoUwKSKxY3qszgfhXj4Iv+6pe1E/p1OnHdOBe3Biy3DV5HpVI9/lBFKAAW59XyXtREwB7G3nyd6Ddct9JS/G41vHQk6+G77WIIxl7feICXQAny3nr2o18CsUv10vXr8ftp5x/g/s0wkEwAMiHwgVX1z/lpmKZxoyZEX5gtdTjzKcNMi8G3BA2f3I1EbLiQLMW8MTqVFN3vOpv8LjAi1fCwqk0oRlZ4ZJc7HHInUhcXbMN59PAi695x8ekjR/44feTw/1SqGzZsU6qrt3KFtB9NpCHtA+0H7XXte+0j2omavv799Dd0/Lf/+c+3QMeu82e4DWItyKI7iQjo7zjcEeVcGXsLEO8wsQjACidslkeBC9SiGzNoMxMRMjcLRL6L/rtSNN865Gw/sRvyaDJgLBloToKjiAMptgHFaCRqPF8fiWdXi09CLUvWAZPMABPYpSrBcpIHPyDZQdU8Eh56HLByCrzrSZTdEd5mLQamqDbgj+IsVuLliEQ8xSzIZBvO00T9oI6FNOYefcHJ4h+f7Dr2zGJtMsf93FBJjy6c+OzDGzZPFjw7Gg7vqPyfFVo3sXQEl/rUOyOWrH91JdIx9vxP/GmgIxe0JtIW6RCBDrEtbkkEZkRSkCQvkORlCMObYMmrtce1TYGQakfR5unuACID51L8iDcS4DihADEFnEKUgRBDyXIp6fiuDMdyAaKTiJzOMEscEN4ewYcfYgegjrYsdsQB4FBJVnGxYpeVNgBJ3GpienFL5JEHxsMOGPU5jYxhyCPYJnMsV/7Gs6u27nhp2bI161eueLimnBP/3L3/h3nTliw+d3CP9jNdJC1TXnj62SfL1sxesvbFxdLLx+p23729fc5rc/Z9fQR1ux/IuT/YgpU4yRASscS0qJbYLJwdgDoAZ6lekQAYuwoUS50SF0LlVvhQxMxciFkCJloYPLagN5FRuWyoXLRY4WTFwVSMhmVAkqBnkJjkmPpxax44frwi+h2XKoVpeV++oSGrVHuclpfyvbiJzD9sBZszw77SyX4SSW2UW2qj3FwoN4+tvsaR6jLn1fptqS4Qmd9WzxC8s64myUkceSoHcRxFlOSMAXPmyx1O9OVOh+7Lr9p8ZjH6clFxuhTXXjBixbN351UP/tkVztpqvA6PJy8CrxkPZTwUlEBli4nizacRl8erw2aqmtHTpxYrSaABbtRsB8g3QsxJxRfIFERpyvEgpO5Fi7q4fV
5wBtlbufHVy9a+8MITDz8ZGH0ztz+6rkvRwik7jx/9uvYXOl168rkDO9cdHDrMxadOjp4JdeH58+TwUe3PdwjzTyuAV+nMVnPIXSSSgNxKi/knG19f685MQIjoFoE5bZk+J6OrCinJLmSK6gPmtIPfgWTQUMHkTmAampkGGupzAgS0uYE4c7EiyIoJqZE7E9BEvykfAI2UCgYKbo0RQoqak7mCpn3cf3lxenH5wLWf9dg55cDx3w+8o52r3Pv08m0vV03fHuBS6OQG2qtNRklGWsP78weO1H498rn2I23f8PGv/3pxW92cu5guDAAdRV2II51JxIwaik5bJWie9gLFXIfpaixFg8CnOlAHiRk2zRfr0cNKeVOwyE08A/jXT5zNtVXacqn5C/GGsjLtx+gebemMGXQq91dqIoglxwA/7cBPPwlCjnw/ifiQo8nAUQuu2wE4mhPwWYCjObiFjoyjCcBRCR1AJhwkuNQ04KcbDnPxXBwwuBOcyM0ENGnhfckBJ2MxMlx1E3ACObLq5OF3B7caJxXrULKoGZJkNi+AzTfnsKfZ8ZiqRfcuPvn3Xf956N5FL2hnP/hEi1bse27FgbefXnGg3ZYli7aqCxdvpgvm72nXVrl/10cfv36/2rbdnnkHPv3kwGNr1z360JYtXMH8Vavmz6l+HnVqKPjNfxk6BejIGot5LAJkAQcS0qw8cCBBatIpbz0qFIQ/JRBSTV5dp5LRFdhZymV18LpmyVb9XAK6BzUL9Yz4dKIJi5BeAkaRU5RGWQKBuJkzcLNO7FByftenmnb6i4Grr4vvu2jwhgOFNZPe+m3W5uULtmVtX/XIK/zuozRXO6md1QZHtfq09DEZKV9/uHzEGOr9cuOxRSUrP/zytG47GCSCQldWD+nQhCYYIEAsYUbSADshlAAvyBCFpRFR8PCzculSwBX83xBbcARhTo7QDWKyhXQiEROgalXCC1ljAEkxh7D8IeH1CljR4AK0ZMOXcYCY0pbGMJOwAq+u28IMfgn/EVydgFf1UZPPT30D+O7RlRMmcGX099F0xhztlxQpRTs9B/fzFN3Af85vYvQl6UjLqlNnZdQZxKCNUPh5iu/TsJvvQzeMG0dXjRunrzkL1nxHX7OokBYV5lBYeRZXOWFCdAk/YMYs6k4GL+CcqT04mvH0ZjCi65nupJFJJJKMPE2xx9CDrSV6SNfRg5uhB4CiSnIIzaU2zUu6C3lKXCOkYElsXBLoCh8PhuKRVYsLHW18CjpaKe4C8OCgviB42Bh4MAWRqzfzdRtq3l00o1dyBc29Y8JdS+bcD1GHtlkmlLy4+9DmxR9PLRwx6oG7byt/Ztq8h5fed279ypVAzwytu/S5+DAJk2vIFhJxYrXCElaLxHolLaR0KlBzHfXK1QWqD35lFqg8Aq++zCRyIOfO0X2sBMlEP70ydNW+s1P11KGnS+m1FzzLGSVpL6lJSu7ZC+swtPGIhZYcsCCVtgWaA3Jvi4WXM3PzOxV2w+KF5FZNbZAJzlz4TId88NVXFwE7EhINdrhJIIPwEsYYI/3s4mauO8xLzJ70D3AkAMd++EQGofobPWiRh/n3GW76Ga2gi+lS2Vr3wcB75MLnyh5Y4vGf2Dhyaj+OD1lvKnr0RZtbU7Sntb9rI2QPnUhvHlLbK733B3dqC7VRXLHr1lG3P9KZFmQM7PigQr+mGzlJS9WGHNb2lQ0fNfqXgxoNFxZx0X0LR515iy6i27R22jxtkdahfbB/u470Nzp11au3T4UMlsvwJ/0M8oCsXvgG4oEJMqH2us0qfJgFhVrJTCi4JQlxQFwBy21UipHAigVMAPdBPsB7AkAo124KlzXr6Wjp07u5G7WvJVE5exN9WhvHUcg9WBzYA+ssZvmhH9Ycb3gHJ3hBFn8y0Av62XLMCwaYyJ3o/kMAJJje2pz1NaLNYwYDgPMpYHagyG0o/slCKlH9TpYioi+ECJuhY3JIxJojvayA7uUDhbGDPfSl76
JzJy7aEP2HNo/Oe+HV6jXaRDqoasurivaBqOzZW74hI+HQwv2flK557IGNpcsWP7RMt+WFENs2g22mkrGGZXqAHk8yg+jxgKsYaIgDPBwn4Lk4CxppGiPNBSS4WPVTsYQYDDaF1HQslrhA+4TkYqRClRJRIeM8cMqUoFeNXODVBUj9UZ+4VOp1o4KF/RLEM7KQ5v72I3V5uPKEd17d88MPe1495C/nPNrP3/+m1XGjT9J4OvqPb6Tte7XDP5z6t3Zk1+vSl+fonehnUD7vg3wsxEM6GtKxxqTjwdDsjdUiFKsLUQHzIz7dfcug+FgzCAB3SU/amSBXq6mNjtDWa79DutXxMPVrP36ufSQq2nNa/evaj1pVKc3/Yfdxms94iesPhfVt5DpjdUtsdQF0Q9RVUeSZKuJGYmk4S9EtgFQUa0jPx40kXE/A9Z89/FMNx7i/R6/hg6JSFj1aFl1fShrXHcXo7q2ve/GaJj3itLamsaDtggX38C801HEHoj1wsbfujt6ur7Uc9OUD0JcMrKmlxfSlFSWpTUhMQ5DJ8uFAK/qCkNMUisQzVYuHNIvZga46aaA6yTKzhwRQHCW5WI2DNNFAmy3Uxyfr6iODMchMg5bTwj9+ohYfNzlp364Dp7T3n3g3S5tNz3XSogc17XVuCMjUQW/9aZe0fLt2/Gvtt+PaVzd3pLPKomevm0mHNfG0nsnyKsOjmHSPoojhWivPuGptkqSN9UcUm15lFljDpFGG2IAJQ64DTK3ge1RUNBwQleit3OazN3FV0RJ9PUi+6M2sBhFoJsPG2gVcDX/ExiseqUT/pH/3FsBmKnzXg3rnaMyNHI25kYVdCpTfHctcWQ5k05Vfz1UcwGsL5CiKu3l+AithZpmTXdj5Fq5843OLNlee3PV+xVS6TKpat32F4Dl38q2fxpXtNcd49jPzjzGeWZp4xtsZz3j0jM7G8ggXwooaUXm7nlFQPaNACsE5+y0U4nQQ2PYW13MxF93ALeIejT7/NrCvhKsSo8XRgMhtiQ421jbB2mIsAuBKBg+lGA8jPNN6XrTEKphMOL49lRwY9dntTfYkdYRryeQ241qmuHAjJbGKJkvsdUaa9AKkKhPGSMUs13BinB0jskmv92F1JcLbHCwKM9ooaoQnhwapySPvWc35JS6xqsIqRb8bHD0u2WA7msiBhjzAzebOakIDjS6Jzm7SzVNMN6+9SDebKyRoo2Dszo7ixt1xLGszG1tSeUtsQ0WootQk76nku0ugowchAJ5Lo8I/z94kHKfnUsG/zgLb//7Cupc5VveyXLHuJdj0uhf4/5ivzSAeNF83+Fssgvlm0Y6UUIF20d7VGs4T7cPK+o8+O3nqHx/9iK4/kY7U1mo/nNS+19bTETTpZ+1bmn7q1AmaoX17QsfvyJu/sfqFh/Rp7g3B/9dabEwHLS1DgS2E0cCJBV4jGqgem9wy8AYDibQp1v7+r3Pn/qUtoHNqt9du1xaISv3efT9G13H7X1n28Gv6Pmadby86gFcesOebSURGXvljvEpDXrVhG/DCBrwuNcngVRBLE17Muh2yjbWjZEiMABXIumalyaBOzVjo5Ux+UxbDaZdg5MTSs4O1P7s/cP0lubleOzP4RP8zqakXs5Qju4CfH4nbALsHSamhbS5d29QgsDQxmbE0EVmayShKAoqSQ0qSnvmlM/SuiCE1C9UgSTfzOFmRgapEomMd5uqV4EVYB6BBvN8Hfp41jZqJYBc9+e+zD85YXJGRNSMrbcsqbSy9++CO7a9oD4nb3j847ZXcNtsWLu07oU1C5oJrFz24KjqJ+3PN4sdXge1gLl8JculAyluv/2GTUU2BUJYi47mUhJYdxvbNOoytNBTN7bGmZ5ODLK/FJmKNw5fVvtUWYmY45AdCfaaWLUQhKKG7HcNN0jZv+Sxy9NQf1HP4nw89yE/6UN12cMc3P/2ufXf0i7VVdIX08voVsyue6dZj77rqT2ZP3yqK0vJdz02b9G
TXHu9Vb/2AThp3SEJ/0QFk+BjDx2C1UvN6icKHWEor1aHuR0RWmRUBFEQk1naVsILXlBFiL6CDUKLZKrFScnaHeAPzR9Ws14b+skjPhlTJ8L2KtdFd8lgkdOHFWPUD3SWkLljsZaVwiDONAQfLGtWVX6m1xyq0o//+QTtGP+O/bMja+e6h1/H3zw1R3Q8i7v+Q4Z6AUakkHBs1QKzDAI1KLLGiT5j6w0WI9zMW0B2pkJ9uXxD95xTwcdeOHi3shFBKSTH4fewD+EitXuNRnGF2yQjFAACXjWekUEjVqUuNww4hyl7P4t7485erWVufuBTfXofe/9m5r+rkcaOUmO9Q5L2q2XdGVEzwxuyfb8FqIsSQGpfs9ORF4LVZQbGGM7tklv3t4Exmp0v2NXXlKaxthGziQ8fKvDiQmE6RRP9VFAmlOUETDRbPpJb2UhHtPIV2LpQKqGmG9tAU7bVsKUvbMRXIP/EN/VbwnjvxT/wFvv6OZ589t07nb3fgr8LiTLZh+eYwKwYbcUbPpjiMI4KVxREL1f8PWmh3elpLfoI+S1c9oaXQ049pt2m3c8e4D6LLuUnRUDSNWxCdA2sEYI2dsIYZEbupUYY8LGApUEx1DKFbEambWPQCivUDpBfWooirltG9dP+y6MkKUWn4nG/XMCZ6gkvWaYDEQBjPdCQ/FstjeJXn65sUxaRXqAE0G425cCENYBEk4LuTH9bwBv9xwzp+9gjh57K/noszcMI67W16UpoHdlXIKimA7LGSQvlYnajW5CV2IQ9RDphX7C8+FDMpgB5BOexbR2/45BPtbdOrZWe8ZXDdjucf4MVYP4q07EeBkIMd7+NG3ScqZz6FzxLYQ3+2h15EMRXoRl2A2J/twVQHy9VK+sKSS6VghRTs3RXbjClW8fFB+AcEHfj0U9pf2/6JdKLsz+uxvsQd4RoY/xp7YwbLYC8sfQYt4wfQvGE0d9qBNCntDfjC59F29Pi4cVqKzid6fhU/lWXQSc2wGR40IywM7oXyUxoeK2XfuUPYSfeLB4hA2hC9AcELxIWdRZFxFnLyOAG0Qt9IUdgTvINbeeg+cY+o/YHx927AxG8LAyFq5ZMTemarJIUjAVw9xwoZLhbizBDA+PYBD+JSLNIUMPPGgm2mS7Ghp2cTAECvG09hDTcipOaGQiFI0zGtVzsatn/tb/2Z7SfnC0rqXlFNij8jKAl7d+799XcLs/IEV01iQpInT0l11aSkJoO5w59N5h6Bc8zqExJTUmM1n8SURnvPtLNBFTUNgEnEE8hhzTI+AJbnx1zJLEdszni9xNM5s3usQVYAJt+5iFXAwL36IZAWNp85KITP3E35r0499eDsFydxk6Ztr/nC7pwdZ+3x9uyqbRXTx89/s/1/1u2nGU/XPjht4ZzhVJKkqcNG7Xg5eqJ4QmHRTe1uK9+4dMjk6SOPLWOYZzXEAUlKAE1JJ6MN7GVHhvsA+EjI8BQ8YH01iWJczWAMd+uJgOyqV9wuNQHnwPTujOpG2OPSywh2JDkF3Z2LN0CrzDoNst4zyTF5jPowIiDJtLqyy8Zp+7/66o2KzYV2ue2a+1dXPb969rNZUkK0cvhd2jta1Peb9s2dQ9fRjJGTfzzg+5Dys0Yz3RsNuvMO051RRNeYeNDX+ECsSBkRkBYnYAQnS3edNqRFRz8eoMXjUhNBL+JCaqqM5V0GfRKxACIEWHEuHg7NqcYEjbslDEDMg4Ew7Pf6vCbIvbjRv34Zuf9ebvy2uVurNygVO8ZxlbPXH/0PZ849QTveU7ZOEqUFq878PXfvn0umS5L4aEkpLWDymAx0fGrI404dr+vhGeUhxOQhMHkI5pbyMARhsoGux6SR4EYSnKBvVhmU0ZBGnMko6rBCImYROc0L9LKepU/+8sCUDUUV46xdXr5335eVq6umrcpr9/T0qjX0vI/ytGjUEG7BmR9X3z6CBn478OPYEbRh5H1a9ENGxwig4yOQRzzQMYxEvEiCXTJISM
Wqm8UrxKpuGc1LPIlG+oO7T7QirLZ7/Swtk1WXjLKw2FGhZEMWhE0rBXz61rH+2YZ4/AHdnEZQ2+63jkeFfVXlVV3DPV+f/67223yOm7Hh0UW1NFr0Iw01fFKW+sofvbrd0rs/bU8nimmP7H4X9KkPEFEjdSB+ciuJxDOrwPgjWQAk4WykHFaJCGoDWCyhQIlnExo+rJWEmk0URuJ9TP8QkSVixJLQJVjYvsN6W6ixAacjtT41654M9A06E8JtSsZSTtMq+cMlVesiVstdkmlWeVVJQ1v+MNMTrT9fB/xNJXlkmlEFDIBmmGFzOpPbmpkb9GIVtT1jcBrsL83FsE9mKMZuNl1WoHYAbqcR3XL9co0g25ONyToTcDwZ0htA/2pbe/OKIFOeIr3a0HqnJ6ZIRw/eu7HIUfrDBwOVPum9H7256oWijeX7j1Y+DyqVm/PM9Kq1hkqVjthy7h8f/5odKM0I7Fi75JahtM2v++vH3UH/GFmpNXygx6YqCEtfgI14yAAD41jDuq9yoq9yNvkqb6N9cyE0cZvhp7CCYvMw1ACmTQy8GfNO4HmD+kyHSa6q7FJbuemVymUzZr6YA27ontET/vFNtJRbrTw7f3xUYrq+BTaVCfthc76x/BWVBAOl0KIB5dQbUM7GBhQsiQ2oLRUVFUK3c2+K5Rs34jXPP6L1p3lwTSdQ2ZUwsaI0BQvAFZdCMc5hT99VoMp2PTMG2ODSpeoOGfVRXpdJrCKUje2Te+2urr6hYyqefzStkAoV2shS0TqzUnjy3MTq7VZTeqxHtQZ4jHNljlhdFOtCIs6X8XYiYvA11Ud4OyvNMFZfuj4ktlofWlM5hy5/mNMG0a/5pVr/h6SEhpH0gKglRF8VOWf0P7CHJr6mkEbo0XppbUuFlHDmR/jOCsgH5oJdZGGuyHCLKwXrQGgWqCJKXBjtRPGB4Wazi2Xp2pHlYkUPVuJng6hY+lRzcDJE1w8lVQZ1UVLQgBVZVuN86IsCLSoyfqY+/guUyNtcoVaMt3XeUjmrOrPT9gVbdlU+MmfZCjed/tjsuU+lCd1q7hxbOXPq/O//E13KTX/7xa1LTElStIKbfuCl+ROj5pjuHwH6Wuh+I3VoAJfXeo9BjE2+SPf9F+n+OFtndbryauWyeXPWBIVufx8z8fPj0Ync8p0rF02K2pnu48xmAuznorkq+v83V8X8OEllXWNS1KIsAhjm8BEqaecOf6Gdrdz9cvWevRs37ubiAqdwsupU4BftQ9rpl13ncZoq8Bo6TaOes1obJYiwN4ylQ4kBa6T6ZuyCWApJQCwAybrtcC5WJGyOaWRO5xpgGrt0AabxGJxrxDSJtCWmKXV22cRAzdRNXdqtmrZ63fqq6c9ka6PELzYOK4lhmttvin7IbRtadmK/7wMq3DtC9/Gj+A+M/d9pZOm4/yYfnwKZg63gAgwA4kaY29K/IxW2RixglplbbwULFGGJs3UsMLm6S9zYiqINkxgWKH+2fbtn7m3EAnfcvuZsNpc/6FbEAj+V/pVzD52infsw5q+554EOF+RcTd5R76vHxYGKyI2tBsizcNrHjf4jjsTuWQAO+3TLMuUwxbzHWVA10Z/ncA2d8kS60K02bky5SSiX5k6O+mC9SYA9VsN6Hci8S9SL6GXrRaT1epHPD7gKC0YOI+80p8vuWjFODuI0mJIlKwmx+hFx+BpH0HUXHBtBb71+xMr1RZ0Bz5vUygVPz16377WPN78yvoyb/My8Bx6Y8tIbe7+sfbN8PKXtpPvGTb35xqmZuQ/NmbVp2O3zAd4PXTjlxv4lWXlPzVtcPXLoDInxPPv8T9wUcRDgl9tIxIM8iItBF1GHLqbm0CXWYYpvHC6Nt7SELtgMRHBAZMWpAxhZnwdrhruyC+Xs16f//POA3qlFme602/OmzgX4Qn3aTyXRq8YNFaWhdsfjz3FvwP5Wgow+F7rpfgwtUy+3SmZjk1iE8l5QhFLsrDDJ/BirQ8msKoklFS
qx2kqzqlRRI6rNXlm5eNaStRmV46ydlcpN++hb3L3RZW9unjGe5869qd55N8aN9uBX98N+mtWl6JXrUu1n0dyglE2zZ2mlo4RuDZ/NncvnnXsTvno1IeIBuJ6PfGPMHjmcEIfwojXUhH2GVktT3sbS1L6bfj7dSmnqtxPvtihNWUS9NNXzvVND9XmEOEiD94qKHSead+7bd/IelsuaXDVmkwVy2cbSFfzZLJeFc5jLbufMFptew4J8treVM8HfjmaVLCO51YtYBjc8wI3Yq1FcCF4961A7Kfz93d93ljocnKUdLPulQOp44m6hWzTrjTe4L6NZb77JfXnuTe74669HU4ArIeB/LfCrZd2K/nd1qxCdqz3xCA3SrEe1J+ich7X3tPe4HM6jXUt3Rk9Gj9D3tTCsEQTMfIjJxJiVh2tjh9UeVmVEyfEFyHwgTW4uaJAz0yID4F5Fg4tou2yJXveglpv74HxfD4cjrjBu4MhAMSjAT/P5p88lTlppEcdw4uS/Lme2iDc3bGG61aKehU6IN/139axh3MPRJbwzOoXbM4SfeffQhoVGPauvNoFbKfUkaeRGAuZc63eQRCGPzQhBbLMU1JrZCTajk8wwKHYvIM3NYJT6gZ8ebPpTGY3b4lZFux4OWABjdo23gsQK+ya9rt/3/imrXkmae9/wO+4YXjEv9ZVVU7j0sQ/OPL7pVNGgdoceOz5pbVbOuonHHjuYe1PRyZePzVjK9hrRfqV+ViNLIS1bpa569mOUy8ByI6Xar9LuM33Y9yxA450xGtMKaolOo79AjQcaHQW1ziYa+TrFqvep3QaNfhIbbIjHqKc43KrVzWjsRRmJOkkoXpbH+1g+L5kscytH3nXXyPvmJu14rryionzVK9qu3IOPHStfmxlcO+X44++0G1R0atPxGYvHLp1x7OWTRbo8HqPVQj3vIYnkJoLo3GKtR73iUb+SGLHGXWnM3IHmZCyuJyKIZJNQFuylk0S2W1XywG8eQrTdmCbEEKjHE7+edLHk0fdY1cy/Pjn0qvHFAyaUrJ0+5IkhvSd2HXQP/eKBHTfcWByeV+Kcv+u6QV0Kp4/R9zjjvI3/TswmQTJDr5UoaWE1XqyPBJj7D2QY5RK8OcEJpwWWUQniRRWTDL1vns6yGoyWRgklSa5HKWAJJT0D6MEyl15CqbHaEpP1yFjY2d3yfqymKko8uyUrm5vxwd8rq97l+cYyynhO+MdTlbvf58y5R2hOwldfyu+tblZIWbrP/d1xP80BGvH+wo7sXqJn9fuI1FRIlxJDEQnTeAdfX0toimTPU9xhVn/1hmpsKZIZKAyy+1Nk7DwzdMATnLfgUyzoOxUfYoM2QHCbAoULs5QfFC0ePh3fhgVML346Ppl9Wkfe7no1E6ck0KoTEXmrksMAvWGeybTxjjScKQbJmnBmPtyLFuZc867tH5HXd/F8+dLK2U/Y6D7talM4n6cNg63XXmviFpTRtu/Vf7hV+ttSZY12uEwZv693aanz+0ol1kNaDvYWjxUCR7M6fa1LdhA7G4BzIYIM1Xp97ARAAy+vQwM/wiGkzc7GHSN2NppgtwFhUijiYJmfwwV/eUMMKtsdsVq/r0WtH0jx6bUNcGX4r8MyWk03LtOK6b3acPqiNrxCv8GQThWVaAfu06hctq1M20mvhV86jl8revgs437XHiTWNVeJnWEWvS/WOOeJVeYErNizRjqWzOGvxn5YGBnrW7uVtt0ielbDf1jhHn/+J/EP8QDEHj8g1FV6/FedDmPa0QcHmQwx4gGrvGWCidSG8yyZkAiH4WxemN3wWIAW0oXtIs5F8vTRxwT9Zj2lrUvN18dqO8Jf6SGlowtxbq3EPqkW4e19bWX3DovTx2emhPXx7TzZvV2Kc6eTjrrR6C1kvQnf7NiYMW7NksBLjKdVtC3NoVXaaO0L7bBWchudSAVK6WRtuaZpDdqTNGnHM09uELjhk8ZNmjVz8vgJwznhxSef2cEdod2pot
2kHdQOaANphPbQ6rW5dD71Ux/E3PnatorNn1c9JU2ZVD2/cuGLE6ZJT1d9xmQ2k6zle/ObiASZIU65YqA2fs2kOfdoJ6j3HkfsgEv10JnaTG0WnWkcXHB/EWlx9xCoNSkDmf1qyCxEuuNM50VSqwWQgPPNeNdlJyahToD0lbah2sTu7I3ExvstL5BXCCQUDikhFxNLu/YA/FPBVwfbhkJKagux4S2YRSHIA1BsGXh7oTsV9D8HhNcJpwKDxUpYrgUREnxT6Y43GFxGjpfoo+fRRBq7naTMkOYakOYRXZqTIAPj6CQmzai2HKTLPVn1l759e5gtZVbhxqG7tg8aP+Le568kzehA/pY5M/relZY4rn/Xtn18Lt/NuV1uvUF7ju65+frb9L7xNGEXPSK+CRJor1tiLblEj0flMfByen6fTMN+ftqHT/Jn4PtWSWvAa5VoA+hKuKoTpz5MDP7H1SvOWIBnd6uY6motumgsLpU37s5m96dIRL8P2CTrFVU9ySoKG/OWJcNmDh6bekfcoNFVT2qrenYv7mCe29syaPDwiUw/F4B+DojpZxE6Kh/Dk/BrAfVqJ+6hOdqRTxqP1tKFdJG2yKMtajzQ50vZHKspnc2xui47ySoX6Gltq5OsvAf4c9E4axEyrPlMKyU68/SZmaGwLq56xclF+UqTi+6LJhcpbqjZ+GL0XX0vxhCj5DOkiLw8BC8FsBeBmEkWiYgYaSQG7ywFiljHCj7YDjaLLKE31MFGAecdwqveUWlc7sxPxoAcr88tmTqzulIG6dnq5FKgtcpSm9g90YKN3RN9heElRuelJ5joZNzgFeeYuC90dgjGvpONe7+DpKyVnWNJLCOspkL8CoRikMogIwVcS7oewdIZwKoN6n8Fm0hEXJWRjiTKCbYrkxiLepemcjbGwysSyeezgMnpsyMgbxmQRffWpkf8rU2PJBhZe8Tp9hUXtz5BwqTRcozkLRTARcMkYodG/eON/YA/gMwukZRcvCMcZ4kPqx5gOD4dIqn59tCX+3QW+9ica22i/ldi09YRo8djrcwpXWLjMR632PtnyNaLtz4/hjtYv1v8GvQbrI/8j37Xl+IP6zO6mdb6iKux490uzRXreHdi2w/A9gMXd7wDLtxtREjKwY435nq+kBq6oOOdkC8oSXtF1Y8db1+zjrfPVRPv8+uPpEhMSvBgB8vfrEoA51jH2xefmKR3vP0J8YmNHe+A0fFOtgFscaVltu+AsEXxymp+AWt+411C3mSj+W33tNL8zr5s55uFkWbtb6m+ttX29x9MaZp64NP3tNYA52+OKRGv9ytBFtivzCQjrtSxzGqtY5ltdCy3Y8cyI/i/7VkyIi/XuDzHqLtk95K+0sw3PwuBVhPfbumb6X/lm5/VfbOwm13uXB/sT5HYcxoSxKMX+uYWVf/L+2bjeRVXKPwzb9B69Z+2ZX75cj0AbkPMJ+v7PdDok8c223EqeohAGO9tUjJCzQj4v/HKlyYu5jFap68L88iXJe+s7kbw/jespYKMPSQB51YvUU1NvEQ1NSnml2WvHwzyv6qoMslcWFa9k6nlRcVV/iddDryxT5x594MkFly4Ux+KIhEyUDuO6TRtPCW28RovT/A24cYEr4mKmuQ4C7yVoL+VUFCbrOd92GdKwCKXLOm3J1yRtJhcLqBuIvPlFxEn9GZSiMX9UUzHAiSHXN8qYmnbmlW0M6xiByKWNsFsfYRYzcy64uQ18xTBInilwUtH91/qFvG/l/1KzU9w2uEpVw7zNiqCvCQq6E7EsB/JcjFtLSz+8rShxbdC26XtozltrdvISy3puqyxfN6Sphhm6A+YwU9ScSb/YhST1hqKSTesZTugmITEFKQnTlaTki8HaAwqWuKa61vs/mKUMLL5jpntCFbxNMHKYjr2dC5h5RmXsPKAse9asPKkNGPbDtz25c2huRguMIlvW1JwsW2ktGA6Jc8Lx7l3xTqIRHns2Scie76YLOjBCJJH0UvMYL
TWWKlfv3eosCgMiXCO6fnvSr4vr94gHPcd/dbNxiTA920SltKz4iesDnAjwYK3XgxWfAW1vJFGJsQy/CQ9wzfSd3wmDoZudxz4BwuPrPBByg6JZVO11dfsKUh6dN5017V9S0b3u65kYGF2VjiclV0otu83Gk6MGHFdTudw27aFXZDWMuEUdx5ipAd3BdhMEtmwBi/G+vO1Hj2t9TAx1Vr1cgJrbeHUGc9G59i8EClWeZeRM+q7aioAI2gqmzD46vWF+X1umnTLDSu7FPQW6e33Tbq+yDtk2qRru1y+jvK/f+9FbqvwHST7PPCddRv4en2ItmnqFb7yotCL21qG87FLuK3i3it+fonY1fj8cCFEZfZco8Zn1MSeakTY4Dt7Ro2o3x7Dvu0J877hk6+7SghtpV21t7fq+7zMdS7zrJvhV1VMhi923FGjvW9c53wHKlH+v76Onz3+bnjnijGfUut7+zS8LwP2wpmNZ+z1YRZw0RP2dNoU0cUqKDbjLiCDTEWS2egGu+k0RnK4kfB5zYg3WKCvab/8msYt7bHH+RlrGqRgeUUqVqzslqiWz/ZDJm1vxiiDXTgT0oX+Qd3/V2vqrDTWDFeO2di5cswhmrN9m/YpfAde0Z/jPS93s+cJYSWmn1EREczhMD4KQBUtoVCzpwvFxZ4uZJSJ8UkHism4w87beBegAQXwZ9dSKi8l55euZ//pOjGBrKUNrIYUIFQxxVyYTZ8XN8cEJ+jCYrXPCReVPOE6pXCd31teR+FCxqWarkPxOkapqrSVyhTb002Asd4TD4KHhXwyBwnOMB6dptjCqszjhGItoTlWO8Na2PpIxmcpshP4GEUeM8YaR44VeyHtC5TcOpWTsP4JMvImABdTc7F+lIodjvhQJJc9zSWXWLAThLVRlGOHZg9pseNDWuzGQ1p+nfzGNL197WAPabFjr3rn6bq951j6aXPVxEFamKe4XDVOlwPST/izWfoJ5zD9hICGqactzulq1o/OYNVWfbQyiOOV5ILxSvavecbVk9700ksvUedXxZN7W7pM6br5bS4YPYo/724qLu9s6XJf96+0U5yvbGNZ1mkadDnHuTw/vpUDf3rePCHLY50u2uZ3jx6HRvHPCNew+3X8pFKvjELOh0+w1MMR3/iAL3zWjtnpgfScRSapzng+W+t38qArAA2o9evRy+/C2bpaZ1P0ciG6tdoNPBVgD+iB7M0D/+Aohw/yJnkUnbfiBtpx5CZp65C/SM+HX5TE8f36ae3pP7T2XKI2lFZHf6BzqTaPPka1qUyPEPh1Zc/UIJ3kgIzH597+f+LPPhMAAHjaY2BkYGAAYqY1CuLx/DZfGeQ5GEDgHDPraRj9v/efIdsr9gQgl4OBCSQKAP2qCgwAAAB42mNgZGDgSPq7Fkgy/O/9f4rtFQNQBAUsBACcywcFAHjaNZJNSFRRGIafc853Z2rTohZu+lGiAknINv1trKZFP0ZWmxorNf8ycVqMkDpQlJQLIxCCEjWzRCmScBEExmyCpEXRrqBlizLJKGpr771Ni4f3fOec7573e7l+kcwKwP0s8ZYxf4Qr9of9luNytECXLZJ19eT9VQb9IKtDC+usn8NugBP+ENXuK1OhivX2mJvqmRM50S4OiBlxV9SKZnHKzTLsntNhZdrr445tohAmqEsfpdeWKbffFKMK+qMaijYiRlX3MBRNU/SVfLQ2jkdrtb+DYmpJZzOiiYL9kp6nEGXk4Z3eeklVdJYpW6I8Xcku+8Ie+0SFzXPOfeNh2MI2KeEktSGP8wc5Y7W0WZ5ReWqU5mwD9f4B+6xb6zxj7j1P3eflW+E79+N1ukyzaV9kkz71+Beq19Dlp9msejgssDW1ir3S7WKjOO0fkXGvmJWujHq5HWdvWc0/pNxfUxWKTKRauBgm6YszTnXQ6mvI615TGOdaktNIksebePYEzZrMG88g326eeyVfMcMxSU6qk3uxt0uMy8OTUKA1PIN0g/Ioqe/W//BB7P
4Hi9IeabvO5Ok/0Q0mU9cZcJ36T2IayfpmcUHU6a0K5uI+30inaIm/adUcsx802E74C0holcIAAAB42mNgYNCBwjCGPsYCxj9MM5iNmMOYW5g3sXCx+LAUsPSxrGM5xirE6sC6hM2ErYFdjL2NfR+HA8cWjjucPJwqnG6ccZzHuPq4DnHrcE/ivsTDx+PCs4PnAy8fbxDvBN5tfGx8TnxT+G7w2/AvEZAT8BPoEtgkaCWYIzhH8JTgNyEeIRuhOKEKoRnCQcLbRKRE6kTuieqJrhH9IiYnFie2QGyXuJZ4kfgBCQWJFok9knaSfZLXJP9JTZM6Ic0ibSTdIb1E+peMDxDuk3WQXSJ7Ra5OboHcOvks+Qny5+Q/KegplCjMU/ilmKO4RUlA6Zqyk3KO8hEVE5UOlW+qKarn1NTUOtQ2qf1Td8EBg9QT1PPU29TnqR9Sf6bBoeGkUaOxTeODxgdNEU0rIPymFaeVBQDd1FqqAAAAAQAAAKEARAAFAAAAAAACAAEAAgAWAAABAAFRAAAAAHjadVLLSsNQED1Jq9IaRYuULoMLV22aVhGJIBVfWIoLLRbETfqyxT4kjYh7P8OvcVV/QvwUT26mNSlKuJMzcydnzswEQAZfSEBLpgAc8YRYg0EvxDrSqApOwEZdcBI5vAleQh7vgpcZnwpeQQXfglMwNFPwKra0vGADO1pF8Bruta7gddS1D8EbMPSs4E2k9W3BGeT0Gc8UWf1U8Cds/Q7nGGMEHybacPl2iVqMPeEVHvp4QE/dXjA2pjdAh16ZPZZorxlr8vg8tXn2LNdhZjTDjOQ4wmLj4N+cW9byMKEfaDRZ0eKxVe092sO5kt0YRyHCEefuk81UPfpkdtlzB0O+PTwyNkZ3oVMr5sVvgikNccIqnuL1aV2lM6wZaPcZD7QHelqMjOh3WNXEM3Fb5QRaemqqx5y6y7zQi3+TZ2RxHmWqsFWXPr90UOTzoh6LPL9cFvM96i5SeZRzwkgNl+zhDFe4oS0I5997/W9PDXI1ObvZn1RSHA3ptMpeBypq0wb7drivfdoy8XyDP0JQfA542m3Ou0+TcRTG8e+hpTcol9JSoCqKIiqI71taCqJCtS3ekIsWARVoUmxrgDaFd2hiTEx0AXVkZ1Q3Edlw0cHEwcEBBv1XlNLfAAnP8slzknNyKGM//56R5Kisg5SJCRNmyrFgxYYdBxVU4qSKamqoxUUdbjzU46WBRprwcYzjnKCZk5yihdOcoZWztHGO81ygnQ4u0sklNHT8dBEgSDcheujlMn1c4SrX6GeAMNe5QYQoMQa5yS1uc4e7DHGPYUYYZYz7PCDOOA+ZYJIpHvGYJ0wzwywJMfOK16zxjlXeSzkrvOUvH/jBHD/5RYrfpMmQY5kCz3nBS7GIVWxiZ4c/7IpDKqRSnFIl1VIjteKSOnGLR+rFyyc2+MIW3/jMJt/5KA1s81UapYk34rOk5gu5tG41FjOapkVKhjVlxDmcNhZTibyxMJ8wlp3ZQy1+qBkHW3Hfv3dQqSv9yi5lQBlUditDyh5lrzJcUld3dd3xNJMy8nPJxFK6NPLHSgZj5qiRzxZLdO+P/+/adfZ42j3OKRLCQBAF0Bkm+0JWE0Ex6LkCksTEUKikiuIGWCwYcHABOEQHReE5BYcJHWjG9fst/n/w/gj8zGpwlk3H+aXtKks1M4jbGvIVHod2ApZaNwyELEGoBRiyvItipL4wEcaUYMnyyUy+ZWQbn9ab4CDsF8FFODeCh3CvBB/hnQgBwq8IISL4V40RofyBQ0TTUkwj7OhEtUMmyHSjGSOTuWY2rI32PdNJPiQZL3TSQq4+STRSagAAAAFR3VVMAAA=) format('woff'); +} \ No newline at end of file diff --git a/plugins/UiConfig/media/img/loading.gif b/plugins/UiConfig/media/img/loading.gif new 
file mode 100644 index 000000000..27d0aa810 Binary files /dev/null and b/plugins/UiConfig/media/img/loading.gif differ diff --git a/plugins/UiConfig/media/js/ConfigStorage.coffee b/plugins/UiConfig/media/js/ConfigStorage.coffee new file mode 100644 index 000000000..b1c6e4fd5 --- /dev/null +++ b/plugins/UiConfig/media/js/ConfigStorage.coffee @@ -0,0 +1,152 @@ +class ConfigStorage extends Class + constructor: (@config) -> + @items = [] + @createSections() + @setValues(@config) + + setValues: (values) -> + for section in @items + for item in section.items + if not values[item.key] + continue + item.value = @formatValue(values[item.key].value) + item.default = @formatValue(values[item.key].default) + item.pending = values[item.key].pending + values[item.key].item = item + + formatValue: (value) -> + if not value + return false + else if typeof(value) == "object" + return value.join("\n") + else if typeof(value) == "number" + return value.toString() + else + return value + + deformatValue: (value, type) -> + if type == "object" and typeof(value) == "string" + if not value.length + return value = null + else + return value.split("\n") + if type == "boolean" and not value + return false + else + return value + + createSections: -> + # Web Interface + section = @createSection("Web Interface") + + section.items.push + key: "open_browser" + title: "Open web browser on ZeroNet startup" + type: "checkbox" + + # Network + section = @createSection("Network") + + section.items.push + key: "fileserver_ip_type" + title: "File server network" + type: "select" + options: [ + {title: "IPv4", value: "ipv4"} + {title: "IPv6", value: "ipv6"} + {title: "Dual (IPv4 & IPv6)", value: "dual"} + ] + description: "Accept incoming peers using IPv4 or IPv6 address. (default: dual)" + + section.items.push + key: "fileserver_port" + title: "File server port" + type: "text" + valid_pattern: /[0-9]*/ + description: "Other peers will use this port to reach your served sites. 
(default: 15441)" + + section.items.push + key: "ip_external" + title: "File server external ip" + type: "textarea" + placeholder: "Detect automatically" + description: "Your file server is accessible on these ips. (default: detect automatically)" + + section.items.push + title: "Tor" + key: "tor" + type: "select" + options: [ + {title: "Disable", value: "disable"} + {title: "Enable", value: "enable"} + {title: "Always", value: "always"} + ] + description: [ + "Disable: Don't connect to peers on Tor network", h("br"), + "Enable: Only use Tor for Tor network peers", h("br"), + "Always: Use Tor for every connections to hide your IP address (slower)" + ] + + section.items.push + title: "Use Tor bridges" + key: "tor_use_bridges" + type: "checkbox" + description: "Use obfuscated bridge relays to avoid network level Tor block (even slower)" + isHidden: -> + return not Page.server_info.tor_has_meek_bridges + + section.items.push + title: "Trackers" + key: "trackers" + type: "textarea" + description: "Discover new peers using these adresses" + + section.items.push + title: "Trackers files" + key: "trackers_file" + type: "text" + description: "Load additional list of torrent trackers dynamically, from a file" + placeholder: "Eg.: data/trackers.json" + value_pos: "fullwidth" + + section.items.push + title: "Proxy for tracker connections" + key: "trackers_proxy" + type: "select" + options: [ + {title: "Custom", value: ""} + {title: "Tor", value: "tor"} + {title: "Disable", value: "disable"} + ] + + section.items.push + title: "Custom socks proxy address for trackers" + key: "trackers_proxy" + type: "text" + placeholder: "Eg.: 127.0.0.1:1080" + value_pos: "fullwidth" + valid_pattern: /.+:[0-9]+/ + isHidden: => + Page.values["trackers_proxy"] in ["tor", "disable"] + + # Performance + section = @createSection("Performance") + + section.items.push + key: "log_level" + title: "Level of logging to file" + type: "select" + options: [ + {title: "Everything", value: "DEBUG"} + {title: 
"Only important messages", value: "INFO"} + {title: "Only errors", value: "ERROR"} + ] + + createSection: (title) => + section = {} + section.title = title + section.items = [] + @items.push(section) + return section + +window.ConfigStorage = ConfigStorage \ No newline at end of file diff --git a/plugins/UiConfig/media/js/ConfigView.coffee b/plugins/UiConfig/media/js/ConfigView.coffee new file mode 100644 index 000000000..a110a17da --- /dev/null +++ b/plugins/UiConfig/media/js/ConfigView.coffee @@ -0,0 +1,124 @@ +class ConfigView extends Class + constructor: () -> + @ + + render: -> + @config_storage.items.map @renderSection + + renderSection: (section) => + h("div.section", {key: section.title}, [ + h("h2", section.title), + h("div.config-items", section.items.map @renderSectionItem) + ]) + + handleResetClick: (e) => + node = e.currentTarget + config_key = node.attributes.config_key.value + default_value = node.attributes.default_value?.value + Page.cmd "wrapperConfirm", ["Reset #{config_key} value?", "Reset to default"], (res) => + if (res) + @values[config_key] = default_value + Page.projector.scheduleRender() + + renderSectionItem: (item) => + value_pos = item.value_pos + + if item.type == "textarea" + value_pos ?= "fullwidth" + else + value_pos ?= "right" + + value_changed = @config_storage.formatValue(@values[item.key]) != item.value + value_default = @config_storage.formatValue(@values[item.key]) == item.default + + if item.key in ["open_browser", "fileserver_port"] # Value default for some settings makes no sense + value_default = true + + marker_title = "Changed from default value: #{item.default} -> #{@values[item.key]}" + if item.pending + marker_title += " (change pending until client restart)" + + if item.isHidden?() + return null + + h("div.config-item", {key: item.title, enterAnimation: Animation.slideDown, exitAnimation: Animation.slideUpInout}, [ + h("div.title", [ + h("h3", item.title), + h("div.description", item.description) + ]) + 
h("div.value.value-#{value_pos}", + if item.type == "select" + @renderValueSelect(item) + else if item.type == "checkbox" + @renderValueCheckbox(item) + else if item.type == "textarea" + @renderValueTextarea(item) + else + @renderValueText(item) + h("a.marker", { + href: "#Reset", title: marker_title, + onclick: @handleResetClick, config_key: item.key, default_value: item.default, + classes: {default: value_default, changed: value_changed, visible: not value_default or value_changed or item.pending, pending: item.pending} + }, "\u2022") + ) + ]) + + # Values + handleInputChange: (e) => + node = e.target + config_key = node.attributes.config_key.value + @values[config_key] = node.value + Page.projector.scheduleRender() + + handleCheckboxChange: (e) => + node = e.currentTarget + config_key = node.attributes.config_key.value + value = not node.classList.contains("checked") + @values[config_key] = value + Page.projector.scheduleRender() + + renderValueText: (item) => + value = @values[item.key] + if not value + value = "" + h("input.input-#{item.type}", {type: item.type, config_key: item.key, value: value, placeholder: item.placeholder, oninput: @handleInputChange}) + + autosizeTextarea: (e) => + if e.currentTarget + # @handleInputChange(e) + node = e.currentTarget + else + node = e + height_before = node.style.height + if height_before + node.style.height = "0px" + h = node.offsetHeight + scrollh = node.scrollHeight + 20 + if scrollh > h + node.style.height = scrollh + "px" + else + node.style.height = height_before + + renderValueTextarea: (item) => + value = @values[item.key] + if not value + value = "" + h("textarea.input-#{item.type}.input-text",{ + type: item.type, config_key: item.key, oninput: @handleInputChange, afterCreate: @autosizeTextarea, + updateAnimation: @autosizeTextarea, value: value, placeholder: item.placeholder + }) + + renderValueCheckbox: (item) => + if @values[item.key] and @values[item.key] != "False" + checked = true + else + checked = false 
+ h("div.checkbox", {onclick: @handleCheckboxChange, config_key: item.key, classes: {checked: checked}}, h("div.checkbox-skin")) + + renderValueSelect: (item) => + h("select.input-select", {config_key: item.key, oninput: @handleInputChange}, + item.options.map (option) => + h("option", {selected: option.value == @values[item.key], value: option.value}, option.title) + ) + +window.ConfigView = ConfigView \ No newline at end of file diff --git a/plugins/UiConfig/media/js/UiConfig.coffee b/plugins/UiConfig/media/js/UiConfig.coffee new file mode 100644 index 000000000..4ee3a1c6c --- /dev/null +++ b/plugins/UiConfig/media/js/UiConfig.coffee @@ -0,0 +1,127 @@ +window.h = maquette.h + +class UiConfig extends ZeroFrame + init: -> + @save_visible = true + @config = null # Setting currently set on the server + @values = null # Entered values on the page + @config_view = new ConfigView() + window.onbeforeunload = => + if @getValuesChanged().length > 0 + return true + else + return null + + onOpenWebsocket: => + @cmd("wrapperSetTitle", "Config - ZeroNet") + @cmd "serverInfo", {}, (server_info) => + @server_info = server_info + @restart_loading = false + @updateConfig() + + updateConfig: (cb) => + @cmd "configList", [], (res) => + @config = res + @values = {} + @config_storage = new ConfigStorage(@config) + @config_view.values = @values + @config_view.config_storage = @config_storage + for key, item of res + value = item.value + @values[key] = @config_storage.formatValue(value) + @projector.scheduleRender() + cb?() + + createProjector: => + @projector = maquette.createProjector() + @projector.replace($("#content"), @render) + @projector.replace($("#bottom-save"), @renderBottomSave) + @projector.replace($("#bottom-restart"), @renderBottomRestart) + + getValuesChanged: => + values_changed = [] + for key, value of @values + if @config_storage.formatValue(value) != @config_storage.formatValue(@config[key]?.value) + values_changed.push({key: key, value: value}) + return 
values_changed + + getValuesPending: => + values_pending = [] + for key, item of @config + if item.pending + values_pending.push(key) + return values_pending + + saveValues: (cb) => + changed_values = @getValuesChanged() + for item, i in changed_values + last = i == changed_values.length - 1 + value = @config_storage.deformatValue(item.value, typeof(@config[item.key].default)) + value_same_as_default = JSON.stringify(@config[item.key].default) == JSON.stringify(value) + if value_same_as_default + value = null + + if @config[item.key].item.valid_pattern and not @config[item.key].item.isHidden?() + match = value.match(@config[item.key].item.valid_pattern) + if not match or match[0] != value + message = "Invalid value of #{@config[item.key].item.title}: #{value} (does not matches #{@config[item.key].item.valid_pattern})" + Page.cmd("wrapperNotification", ["error", message]) + cb(false) + break + + @saveValue(item.key, value, if last then cb else null) + + saveValue: (key, value, cb) => + if key == "open_browser" + if value + value = "default_browser" + else + value = "False" + + Page.cmd "configSet", [key, value], (res) => + if res != "ok" + Page.cmd "wrapperNotification", ["error", res.error] + cb?(true) + + render: => + if not @config + return h("div.content") + + h("div.content", [ + @config_view.render() + ]) + + handleSaveClick: => + @save_loading = true + @logStart "Save" + @saveValues (success) => + @save_loading = false + @logEnd "Save" + if success + @updateConfig() + Page.projector.scheduleRender() + return false + + renderBottomSave: => + values_changed = @getValuesChanged() + h("div.bottom.bottom-save", {classes: {visible: values_changed.length}}, h("div.bottom-content", [ + h("div.title", "#{values_changed.length} configuration item value changed"), + h("a.button.button-submit.button-save", {href: "#Save", classes: {loading: @save_loading}, onclick: @handleSaveClick}, "Save settings") + ])) + + handleRestartClick: => + @restart_loading = true + 
Page.cmd("serverShutdown", {restart: true}) + Page.projector.scheduleRender() + return false + + renderBottomRestart: => + values_pending = @getValuesPending() + values_changed = @getValuesChanged() + h("div.bottom.bottom-restart", {classes: {visible: values_pending.length and not values_changed.length}}, h("div.bottom-content", [ + h("div.title", "Some changed settings requires restart"), + h("a.button.button-submit.button-restart", {href: "#Restart", classes: {loading: @restart_loading}, onclick: @handleRestartClick}, "Restart ZeroNet client") + ])) + +window.Page = new UiConfig() +window.Page.createProjector() diff --git a/plugins/UiConfig/media/js/all.js b/plugins/UiConfig/media/js/all.js new file mode 100644 index 000000000..0d0f2c3d1 --- /dev/null +++ b/plugins/UiConfig/media/js/all.js @@ -0,0 +1,1941 @@ + + +/* ---- plugins/UiConfig/media/js/lib/Class.coffee ---- */ + + +(function() { + var Class, + slice = [].slice; + + Class = (function() { + function Class() {} + + Class.prototype.trace = true; + + Class.prototype.log = function() { + var args; + args = 1 <= arguments.length ? slice.call(arguments, 0) : []; + if (!this.trace) { + return; + } + if (typeof console === 'undefined') { + return; + } + args.unshift("[" + this.constructor.name + "]"); + console.log.apply(console, args); + return this; + }; + + Class.prototype.logStart = function() { + var args, name; + name = arguments[0], args = 2 <= arguments.length ? slice.call(arguments, 1) : []; + if (!this.trace) { + return; + } + this.logtimers || (this.logtimers = {}); + this.logtimers[name] = +(new Date); + if (args.length > 0) { + this.log.apply(this, ["" + name].concat(slice.call(args), ["(started)"])); + } + return this; + }; + + Class.prototype.logEnd = function() { + var args, ms, name; + name = arguments[0], args = 2 <= arguments.length ? 
slice.call(arguments, 1) : []; + ms = +(new Date) - this.logtimers[name]; + this.log.apply(this, ["" + name].concat(slice.call(args), ["(Done in " + ms + "ms)"])); + return this; + }; + + return Class; + + })(); + + window.Class = Class; + +}).call(this); + + +/* ---- plugins/UiConfig/media/js/lib/Promise.coffee ---- */ + + +(function() { + var Promise, + slice = [].slice; + + Promise = (function() { + Promise.when = function() { + var args, fn, i, len, num_uncompleted, promise, task, task_id, tasks; + tasks = 1 <= arguments.length ? slice.call(arguments, 0) : []; + num_uncompleted = tasks.length; + args = new Array(num_uncompleted); + promise = new Promise(); + fn = function(task_id) { + return task.then(function() { + args[task_id] = Array.prototype.slice.call(arguments); + num_uncompleted--; + if (num_uncompleted === 0) { + return promise.complete.apply(promise, args); + } + }); + }; + for (task_id = i = 0, len = tasks.length; i < len; task_id = ++i) { + task = tasks[task_id]; + fn(task_id); + } + return promise; + }; + + function Promise() { + this.resolved = false; + this.end_promise = null; + this.result = null; + this.callbacks = []; + } + + Promise.prototype.resolve = function() { + var back, callback, i, len, ref; + if (this.resolved) { + return false; + } + this.resolved = true; + this.data = arguments; + if (!arguments.length) { + this.data = [true]; + } + this.result = this.data[0]; + ref = this.callbacks; + for (i = 0, len = ref.length; i < len; i++) { + callback = ref[i]; + back = callback.apply(callback, this.data); + } + if (this.end_promise) { + return this.end_promise.resolve(back); + } + }; + + Promise.prototype.fail = function() { + return this.resolve(false); + }; + + Promise.prototype.then = function(callback) { + if (this.resolved === true) { + callback.apply(callback, this.data); + return; + } + this.callbacks.push(callback); + return this.end_promise = new Promise(); + }; + + return Promise; + + })(); + + window.Promise = Promise; + + + /* 
+ s = Date.now() + log = (text) -> + console.log Date.now()-s, Array.prototype.slice.call(arguments).join(", ") + + log "Started" + + cmd = (query) -> + p = new Promise() + setTimeout ( -> + p.resolve query+" Result" + ), 100 + return p + + back = cmd("SELECT * FROM message").then (res) -> + log res + return "Return from query" + .then (res) -> + log "Back then", res + + log "Query started", back + */ + +}).call(this); + + +/* ---- plugins/UiConfig/media/js/lib/Prototypes.coffee ---- */ + + +(function() { + String.prototype.startsWith = function(s) { + return this.slice(0, s.length) === s; + }; + + String.prototype.endsWith = function(s) { + return s === '' || this.slice(-s.length) === s; + }; + + String.prototype.repeat = function(count) { + return new Array(count + 1).join(this); + }; + + window.isEmpty = function(obj) { + var key; + for (key in obj) { + return false; + } + return true; + }; + +}).call(this); + + +/* ---- plugins/UiConfig/media/js/lib/maquette.js ---- */ + + +(function (root, factory) { + if (typeof define === 'function' && define.amd) { + // AMD. Register as an anonymous module. 
+ define(['exports'], factory); + } else if (typeof exports === 'object' && typeof exports.nodeName !== 'string') { + // CommonJS + factory(exports); + } else { + // Browser globals + factory(root.maquette = {}); + } +}(this, function (exports) { + 'use strict'; + ; + ; + ; + ; + var NAMESPACE_W3 = 'http://www.w3.org/'; + var NAMESPACE_SVG = NAMESPACE_W3 + '2000/svg'; + var NAMESPACE_XLINK = NAMESPACE_W3 + '1999/xlink'; + // Utilities + var emptyArray = []; + var extend = function (base, overrides) { + var result = {}; + Object.keys(base).forEach(function (key) { + result[key] = base[key]; + }); + if (overrides) { + Object.keys(overrides).forEach(function (key) { + result[key] = overrides[key]; + }); + } + return result; + }; + // Hyperscript helper functions + var same = function (vnode1, vnode2) { + if (vnode1.vnodeSelector !== vnode2.vnodeSelector) { + return false; + } + if (vnode1.properties && vnode2.properties) { + if (vnode1.properties.key !== vnode2.properties.key) { + return false; + } + return vnode1.properties.bind === vnode2.properties.bind; + } + return !vnode1.properties && !vnode2.properties; + }; + var toTextVNode = function (data) { + return { + vnodeSelector: '', + properties: undefined, + children: undefined, + text: data.toString(), + domNode: null + }; + }; + var appendChildren = function (parentSelector, insertions, main) { + for (var i = 0; i < insertions.length; i++) { + var item = insertions[i]; + if (Array.isArray(item)) { + appendChildren(parentSelector, item, main); + } else { + if (item !== null && item !== undefined) { + if (!item.hasOwnProperty('vnodeSelector')) { + item = toTextVNode(item); + } + main.push(item); + } + } + } + }; + // Render helper functions + var missingTransition = function () { + throw new Error('Provide a transitions object to the projectionOptions to do animations'); + }; + var DEFAULT_PROJECTION_OPTIONS = { + namespace: undefined, + eventHandlerInterceptor: undefined, + styleApplyer: function (domNode, 
styleName, value) { + // Provides a hook to add vendor prefixes for browsers that still need it. + domNode.style[styleName] = value; + }, + transitions: { + enter: missingTransition, + exit: missingTransition + } + }; + var applyDefaultProjectionOptions = function (projectorOptions) { + return extend(DEFAULT_PROJECTION_OPTIONS, projectorOptions); + }; + var checkStyleValue = function (styleValue) { + if (typeof styleValue !== 'string') { + throw new Error('Style values must be strings'); + } + }; + var setProperties = function (domNode, properties, projectionOptions) { + if (!properties) { + return; + } + var eventHandlerInterceptor = projectionOptions.eventHandlerInterceptor; + var propNames = Object.keys(properties); + var propCount = propNames.length; + for (var i = 0; i < propCount; i++) { + var propName = propNames[i]; + /* tslint:disable:no-var-keyword: edge case */ + var propValue = properties[propName]; + /* tslint:enable:no-var-keyword */ + if (propName === 'className') { + throw new Error('Property "className" is not supported, use "class".'); + } else if (propName === 'class') { + if (domNode.className) { + // May happen if classes is specified before class + domNode.className += ' ' + propValue; + } else { + domNode.className = propValue; + } + } else if (propName === 'classes') { + // object with string keys and boolean values + var classNames = Object.keys(propValue); + var classNameCount = classNames.length; + for (var j = 0; j < classNameCount; j++) { + var className = classNames[j]; + if (propValue[className]) { + domNode.classList.add(className); + } + } + } else if (propName === 'styles') { + // object with string keys and string (!) 
values + var styleNames = Object.keys(propValue); + var styleCount = styleNames.length; + for (var j = 0; j < styleCount; j++) { + var styleName = styleNames[j]; + var styleValue = propValue[styleName]; + if (styleValue) { + checkStyleValue(styleValue); + projectionOptions.styleApplyer(domNode, styleName, styleValue); + } + } + } else if (propName === 'key') { + continue; + } else if (propValue === null || propValue === undefined) { + continue; + } else { + var type = typeof propValue; + if (type === 'function') { + if (propName.lastIndexOf('on', 0) === 0) { + if (eventHandlerInterceptor) { + propValue = eventHandlerInterceptor(propName, propValue, domNode, properties); // intercept eventhandlers + } + if (propName === 'oninput') { + (function () { + // record the evt.target.value, because IE and Edge sometimes do a requestAnimationFrame between changing value and running oninput + var oldPropValue = propValue; + propValue = function (evt) { + evt.target['oninput-value'] = evt.target.value; + // may be HTMLTextAreaElement as well + oldPropValue.apply(this, [evt]); + }; + }()); + } + domNode[propName] = propValue; + } + } else if (type === 'string' && propName !== 'value' && propName !== 'innerHTML') { + if (projectionOptions.namespace === NAMESPACE_SVG && propName === 'href') { + domNode.setAttributeNS(NAMESPACE_XLINK, propName, propValue); + } else { + domNode.setAttribute(propName, propValue); + } + } else { + domNode[propName] = propValue; + } + } + } + }; + var updateProperties = function (domNode, previousProperties, properties, projectionOptions) { + if (!properties) { + return; + } + var propertiesUpdated = false; + var propNames = Object.keys(properties); + var propCount = propNames.length; + for (var i = 0; i < propCount; i++) { + var propName = propNames[i]; + // assuming that properties will be nullified instead of missing is by design + var propValue = properties[propName]; + var previousValue = previousProperties[propName]; + if (propName === 'class') 
{ + if (previousValue !== propValue) { + throw new Error('"class" property may not be updated. Use the "classes" property for conditional css classes.'); + } + } else if (propName === 'classes') { + var classList = domNode.classList; + var classNames = Object.keys(propValue); + var classNameCount = classNames.length; + for (var j = 0; j < classNameCount; j++) { + var className = classNames[j]; + var on = !!propValue[className]; + var previousOn = !!previousValue[className]; + if (on === previousOn) { + continue; + } + propertiesUpdated = true; + if (on) { + classList.add(className); + } else { + classList.remove(className); + } + } + } else if (propName === 'styles') { + var styleNames = Object.keys(propValue); + var styleCount = styleNames.length; + for (var j = 0; j < styleCount; j++) { + var styleName = styleNames[j]; + var newStyleValue = propValue[styleName]; + var oldStyleValue = previousValue[styleName]; + if (newStyleValue === oldStyleValue) { + continue; + } + propertiesUpdated = true; + if (newStyleValue) { + checkStyleValue(newStyleValue); + projectionOptions.styleApplyer(domNode, styleName, newStyleValue); + } else { + projectionOptions.styleApplyer(domNode, styleName, ''); + } + } + } else { + if (!propValue && typeof previousValue === 'string') { + propValue = ''; + } + if (propName === 'value') { + if (domNode[propName] !== propValue && domNode['oninput-value'] !== propValue) { + domNode[propName] = propValue; + // Reset the value, even if the virtual DOM did not change + domNode['oninput-value'] = undefined; + } + // else do not update the domNode, otherwise the cursor position would be changed + if (propValue !== previousValue) { + propertiesUpdated = true; + } + } else if (propValue !== previousValue) { + var type = typeof propValue; + if (type === 'function') { + throw new Error('Functions may not be updated on subsequent renders (property: ' + propName + '). 
Hint: declare event handler functions outside the render() function.'); + } + if (type === 'string' && propName !== 'innerHTML') { + if (projectionOptions.namespace === NAMESPACE_SVG && propName === 'href') { + domNode.setAttributeNS(NAMESPACE_XLINK, propName, propValue); + } else { + domNode.setAttribute(propName, propValue); + } + } else { + if (domNode[propName] !== propValue) { + domNode[propName] = propValue; + } + } + propertiesUpdated = true; + } + } + } + return propertiesUpdated; + }; + var findIndexOfChild = function (children, sameAs, start) { + if (sameAs.vnodeSelector !== '') { + // Never scan for text-nodes + for (var i = start; i < children.length; i++) { + if (same(children[i], sameAs)) { + return i; + } + } + } + return -1; + }; + var nodeAdded = function (vNode, transitions) { + if (vNode.properties) { + var enterAnimation = vNode.properties.enterAnimation; + if (enterAnimation) { + if (typeof enterAnimation === 'function') { + enterAnimation(vNode.domNode, vNode.properties); + } else { + transitions.enter(vNode.domNode, vNode.properties, enterAnimation); + } + } + } + }; + var nodeToRemove = function (vNode, transitions) { + var domNode = vNode.domNode; + if (vNode.properties) { + var exitAnimation = vNode.properties.exitAnimation; + if (exitAnimation) { + domNode.style.pointerEvents = 'none'; + var removeDomNode = function () { + if (domNode.parentNode) { + domNode.parentNode.removeChild(domNode); + } + }; + if (typeof exitAnimation === 'function') { + exitAnimation(domNode, removeDomNode, vNode.properties); + return; + } else { + transitions.exit(vNode.domNode, vNode.properties, exitAnimation, removeDomNode); + return; + } + } + } + if (domNode.parentNode) { + domNode.parentNode.removeChild(domNode); + } + }; + var checkDistinguishable = function (childNodes, indexToCheck, parentVNode, operation) { + var childNode = childNodes[indexToCheck]; + if (childNode.vnodeSelector === '') { + return; // Text nodes need not be distinguishable + } + var 
properties = childNode.properties; + var key = properties ? properties.key === undefined ? properties.bind : properties.key : undefined; + if (!key) { + for (var i = 0; i < childNodes.length; i++) { + if (i !== indexToCheck) { + var node = childNodes[i]; + if (same(node, childNode)) { + if (operation === 'added') { + throw new Error(parentVNode.vnodeSelector + ' had a ' + childNode.vnodeSelector + ' child ' + 'added, but there is now more than one. You must add unique key properties to make them distinguishable.'); + } else { + throw new Error(parentVNode.vnodeSelector + ' had a ' + childNode.vnodeSelector + ' child ' + 'removed, but there were more than one. You must add unique key properties to make them distinguishable.'); + } + } + } + } + } + }; + var createDom; + var updateDom; + var updateChildren = function (vnode, domNode, oldChildren, newChildren, projectionOptions) { + if (oldChildren === newChildren) { + return false; + } + oldChildren = oldChildren || emptyArray; + newChildren = newChildren || emptyArray; + var oldChildrenLength = oldChildren.length; + var newChildrenLength = newChildren.length; + var transitions = projectionOptions.transitions; + var oldIndex = 0; + var newIndex = 0; + var i; + var textUpdated = false; + while (newIndex < newChildrenLength) { + var oldChild = oldIndex < oldChildrenLength ? 
oldChildren[oldIndex] : undefined; + var newChild = newChildren[newIndex]; + if (oldChild !== undefined && same(oldChild, newChild)) { + textUpdated = updateDom(oldChild, newChild, projectionOptions) || textUpdated; + oldIndex++; + } else { + var findOldIndex = findIndexOfChild(oldChildren, newChild, oldIndex + 1); + if (findOldIndex >= 0) { + // Remove preceding missing children + for (i = oldIndex; i < findOldIndex; i++) { + nodeToRemove(oldChildren[i], transitions); + checkDistinguishable(oldChildren, i, vnode, 'removed'); + } + textUpdated = updateDom(oldChildren[findOldIndex], newChild, projectionOptions) || textUpdated; + oldIndex = findOldIndex + 1; + } else { + // New child + createDom(newChild, domNode, oldIndex < oldChildrenLength ? oldChildren[oldIndex].domNode : undefined, projectionOptions); + nodeAdded(newChild, transitions); + checkDistinguishable(newChildren, newIndex, vnode, 'added'); + } + } + newIndex++; + } + if (oldChildrenLength > oldIndex) { + // Remove child fragments + for (i = oldIndex; i < oldChildrenLength; i++) { + nodeToRemove(oldChildren[i], transitions); + checkDistinguishable(oldChildren, i, vnode, 'removed'); + } + } + return textUpdated; + }; + var addChildren = function (domNode, children, projectionOptions) { + if (!children) { + return; + } + for (var i = 0; i < children.length; i++) { + createDom(children[i], domNode, undefined, projectionOptions); + } + }; + var initPropertiesAndChildren = function (domNode, vnode, projectionOptions) { + addChildren(domNode, vnode.children, projectionOptions); + // children before properties, needed for value property of . 
+ if (vnode.text) { + domNode.textContent = vnode.text; + } + setProperties(domNode, vnode.properties, projectionOptions); + if (vnode.properties && vnode.properties.afterCreate) { + vnode.properties.afterCreate(domNode, projectionOptions, vnode.vnodeSelector, vnode.properties, vnode.children); + } + }; + createDom = function (vnode, parentNode, insertBefore, projectionOptions) { + var domNode, i, c, start = 0, type, found; + var vnodeSelector = vnode.vnodeSelector; + if (vnodeSelector === '') { + domNode = vnode.domNode = document.createTextNode(vnode.text); + if (insertBefore !== undefined) { + parentNode.insertBefore(domNode, insertBefore); + } else { + parentNode.appendChild(domNode); + } + } else { + for (i = 0; i <= vnodeSelector.length; ++i) { + c = vnodeSelector.charAt(i); + if (i === vnodeSelector.length || c === '.' || c === '#') { + type = vnodeSelector.charAt(start - 1); + found = vnodeSelector.slice(start, i); + if (type === '.') { + domNode.classList.add(found); + } else if (type === '#') { + domNode.id = found; + } else { + if (found === 'svg') { + projectionOptions = extend(projectionOptions, { namespace: NAMESPACE_SVG }); + } + if (projectionOptions.namespace !== undefined) { + domNode = vnode.domNode = document.createElementNS(projectionOptions.namespace, found); + } else { + domNode = vnode.domNode = document.createElement(found); + } + if (insertBefore !== undefined) { + parentNode.insertBefore(domNode, insertBefore); + } else { + parentNode.appendChild(domNode); + } + } + start = i + 1; + } + } + initPropertiesAndChildren(domNode, vnode, projectionOptions); + } + }; + updateDom = function (previous, vnode, projectionOptions) { + var domNode = previous.domNode; + var textUpdated = false; + if (previous === vnode) { + return false; // By contract, VNode objects may not be modified anymore after passing them to maquette + } + var updated = false; + if (vnode.vnodeSelector === '') { + if (vnode.text !== previous.text) { + var newVNode = 
document.createTextNode(vnode.text); + domNode.parentNode.replaceChild(newVNode, domNode); + vnode.domNode = newVNode; + textUpdated = true; + return textUpdated; + } + } else { + if (vnode.vnodeSelector.lastIndexOf('svg', 0) === 0) { + projectionOptions = extend(projectionOptions, { namespace: NAMESPACE_SVG }); + } + if (previous.text !== vnode.text) { + updated = true; + if (vnode.text === undefined) { + domNode.removeChild(domNode.firstChild); // the only textnode presumably + } else { + domNode.textContent = vnode.text; + } + } + updated = updateChildren(vnode, domNode, previous.children, vnode.children, projectionOptions) || updated; + updated = updateProperties(domNode, previous.properties, vnode.properties, projectionOptions) || updated; + if (vnode.properties && vnode.properties.afterUpdate) { + vnode.properties.afterUpdate(domNode, projectionOptions, vnode.vnodeSelector, vnode.properties, vnode.children); + } + } + if (updated && vnode.properties && vnode.properties.updateAnimation) { + vnode.properties.updateAnimation(domNode, vnode.properties, previous.properties); + } + vnode.domNode = previous.domNode; + return textUpdated; + }; + var createProjection = function (vnode, projectionOptions) { + return { + update: function (updatedVnode) { + if (vnode.vnodeSelector !== updatedVnode.vnodeSelector) { + throw new Error('The selector for the root VNode may not be changed. (consider using dom.merge and add one extra level to the virtual DOM)'); + } + updateDom(vnode, updatedVnode, projectionOptions); + vnode = updatedVnode; + }, + domNode: vnode.domNode + }; + }; + ; + // The other two parameters are not added here, because the Typescript compiler creates surrogate code for desctructuring 'children'. 
+ exports.h = function (selector) { + var properties = arguments[1]; + if (typeof selector !== 'string') { + throw new Error(); + } + var childIndex = 1; + if (properties && !properties.hasOwnProperty('vnodeSelector') && !Array.isArray(properties) && typeof properties === 'object') { + childIndex = 2; + } else { + // Optional properties argument was omitted + properties = undefined; + } + var text = undefined; + var children = undefined; + var argsLength = arguments.length; + // Recognize a common special case where there is only a single text node + if (argsLength === childIndex + 1) { + var onlyChild = arguments[childIndex]; + if (typeof onlyChild === 'string') { + text = onlyChild; + } else if (onlyChild !== undefined && onlyChild.length === 1 && typeof onlyChild[0] === 'string') { + text = onlyChild[0]; + } + } + if (text === undefined) { + children = []; + for (; childIndex < arguments.length; childIndex++) { + var child = arguments[childIndex]; + if (child === null || child === undefined) { + continue; + } else if (Array.isArray(child)) { + appendChildren(selector, child, children); + } else if (child.hasOwnProperty('vnodeSelector')) { + children.push(child); + } else { + children.push(toTextVNode(child)); + } + } + } + return { + vnodeSelector: selector, + properties: properties, + children: children, + text: text === '' ? undefined : text, + domNode: null + }; + }; + /** + * Contains simple low-level utility functions to manipulate the real DOM. + */ + exports.dom = { + /** + * Creates a real DOM tree from `vnode`. The [[Projection]] object returned will contain the resulting DOM Node in + * its [[Projection.domNode|domNode]] property. + * This is a low-level method. Users wil typically use a [[Projector]] instead. + * @param vnode - The root of the virtual DOM tree that was created using the [[h]] function. NOTE: [[VNode]] + * objects may only be rendered once. + * @param projectionOptions - Options to be used to create and update the projection. 
+ * @returns The [[Projection]] which also contains the DOM Node that was created. + */ + create: function (vnode, projectionOptions) { + projectionOptions = applyDefaultProjectionOptions(projectionOptions); + createDom(vnode, document.createElement('div'), undefined, projectionOptions); + return createProjection(vnode, projectionOptions); + }, + /** + * Appends a new childnode to the DOM which is generated from a [[VNode]]. + * This is a low-level method. Users wil typically use a [[Projector]] instead. + * @param parentNode - The parent node for the new childNode. + * @param vnode - The root of the virtual DOM tree that was created using the [[h]] function. NOTE: [[VNode]] + * objects may only be rendered once. + * @param projectionOptions - Options to be used to create and update the [[Projection]]. + * @returns The [[Projection]] that was created. + */ + append: function (parentNode, vnode, projectionOptions) { + projectionOptions = applyDefaultProjectionOptions(projectionOptions); + createDom(vnode, parentNode, undefined, projectionOptions); + return createProjection(vnode, projectionOptions); + }, + /** + * Inserts a new DOM node which is generated from a [[VNode]]. + * This is a low-level method. Users wil typically use a [[Projector]] instead. + * @param beforeNode - The node that the DOM Node is inserted before. + * @param vnode - The root of the virtual DOM tree that was created using the [[h]] function. + * NOTE: [[VNode]] objects may only be rendered once. + * @param projectionOptions - Options to be used to create and update the projection, see [[createProjector]]. + * @returns The [[Projection]] that was created. 
+ */ + insertBefore: function (beforeNode, vnode, projectionOptions) { + projectionOptions = applyDefaultProjectionOptions(projectionOptions); + createDom(vnode, beforeNode.parentNode, beforeNode, projectionOptions); + return createProjection(vnode, projectionOptions); + }, + /** + * Merges a new DOM node which is generated from a [[VNode]] with an existing DOM Node. + * This means that the virtual DOM and the real DOM will have one overlapping element. + * Therefore the selector for the root [[VNode]] will be ignored, but its properties and children will be applied to the Element provided. + * This is a low-level method. Users wil typically use a [[Projector]] instead. + * @param domNode - The existing element to adopt as the root of the new virtual DOM. Existing attributes and childnodes are preserved. + * @param vnode - The root of the virtual DOM tree that was created using the [[h]] function. NOTE: [[VNode]] objects + * may only be rendered once. + * @param projectionOptions - Options to be used to create and update the projection, see [[createProjector]]. + * @returns The [[Projection]] that was created. + */ + merge: function (element, vnode, projectionOptions) { + projectionOptions = applyDefaultProjectionOptions(projectionOptions); + vnode.domNode = element; + initPropertiesAndChildren(element, vnode, projectionOptions); + return createProjection(vnode, projectionOptions); + } + }; + /** + * Creates a [[CalculationCache]] object, useful for caching [[VNode]] trees. + * In practice, caching of [[VNode]] trees is not needed, because achieving 60 frames per second is almost never a problem. + * For more information, see [[CalculationCache]]. + * + * @param The type of the value that is cached. 
+ */ + exports.createCache = function () { + var cachedInputs = undefined; + var cachedOutcome = undefined; + var result = { + invalidate: function () { + cachedOutcome = undefined; + cachedInputs = undefined; + }, + result: function (inputs, calculation) { + if (cachedInputs) { + for (var i = 0; i < inputs.length; i++) { + if (cachedInputs[i] !== inputs[i]) { + cachedOutcome = undefined; + } + } + } + if (!cachedOutcome) { + cachedOutcome = calculation(); + cachedInputs = inputs; + } + return cachedOutcome; + } + }; + return result; + }; + /** + * Creates a {@link Mapping} instance that keeps an array of result objects synchronized with an array of source objects. + * See {@link http://maquettejs.org/docs/arrays.html|Working with arrays}. + * + * @param The type of source items. A database-record for instance. + * @param The type of target items. A [[Component]] for instance. + * @param getSourceKey `function(source)` that must return a key to identify each source object. The result must either be a string or a number. + * @param createResult `function(source, index)` that must create a new result object from a given source. This function is identical + * to the `callback` argument in `Array.map(callback)`. + * @param updateResult `function(source, target, index)` that updates a result to an updated source. 
+ */ + exports.createMapping = function (getSourceKey, createResult, updateResult) { + var keys = []; + var results = []; + return { + results: results, + map: function (newSources) { + var newKeys = newSources.map(getSourceKey); + var oldTargets = results.slice(); + var oldIndex = 0; + for (var i = 0; i < newSources.length; i++) { + var source = newSources[i]; + var sourceKey = newKeys[i]; + if (sourceKey === keys[oldIndex]) { + results[i] = oldTargets[oldIndex]; + updateResult(source, oldTargets[oldIndex], i); + oldIndex++; + } else { + var found = false; + for (var j = 1; j < keys.length; j++) { + var searchIndex = (oldIndex + j) % keys.length; + if (keys[searchIndex] === sourceKey) { + results[i] = oldTargets[searchIndex]; + updateResult(newSources[i], oldTargets[searchIndex], i); + oldIndex = searchIndex + 1; + found = true; + break; + } + } + if (!found) { + results[i] = createResult(source, i); + } + } + } + results.length = newSources.length; + keys = newKeys; + } + }; + }; + /** + * Creates a [[Projector]] instance using the provided projectionOptions. + * + * For more information, see [[Projector]]. + * + * @param projectionOptions Options that influence how the DOM is rendered and updated. + */ + exports.createProjector = function (projectorOptions) { + var projector; + var projectionOptions = applyDefaultProjectionOptions(projectorOptions); + projectionOptions.eventHandlerInterceptor = function (propertyName, eventHandler, domNode, properties) { + return function () { + // intercept function calls (event handlers) to do a render afterwards. 
+ projector.scheduleRender(); + return eventHandler.apply(properties.bind || this, arguments); + }; + }; + var renderCompleted = true; + var scheduled; + var stopped = false; + var projections = []; + var renderFunctions = []; + // matches the projections array + var doRender = function () { + scheduled = undefined; + if (!renderCompleted) { + return; // The last render threw an error, it should be logged in the browser console. + } + renderCompleted = false; + for (var i = 0; i < projections.length; i++) { + var updatedVnode = renderFunctions[i](); + projections[i].update(updatedVnode); + } + renderCompleted = true; + }; + projector = { + scheduleRender: function () { + if (!scheduled && !stopped) { + scheduled = requestAnimationFrame(doRender); + } + }, + stop: function () { + if (scheduled) { + cancelAnimationFrame(scheduled); + scheduled = undefined; + } + stopped = true; + }, + resume: function () { + stopped = false; + renderCompleted = true; + projector.scheduleRender(); + }, + append: function (parentNode, renderMaquetteFunction) { + projections.push(exports.dom.append(parentNode, renderMaquetteFunction(), projectionOptions)); + renderFunctions.push(renderMaquetteFunction); + }, + insertBefore: function (beforeNode, renderMaquetteFunction) { + projections.push(exports.dom.insertBefore(beforeNode, renderMaquetteFunction(), projectionOptions)); + renderFunctions.push(renderMaquetteFunction); + }, + merge: function (domNode, renderMaquetteFunction) { + projections.push(exports.dom.merge(domNode, renderMaquetteFunction(), projectionOptions)); + renderFunctions.push(renderMaquetteFunction); + }, + replace: function (domNode, renderMaquetteFunction) { + var vnode = renderMaquetteFunction(); + createDom(vnode, domNode.parentNode, domNode, projectionOptions); + domNode.parentNode.removeChild(domNode); + projections.push(createProjection(vnode, projectionOptions)); + renderFunctions.push(renderMaquetteFunction); + }, + detach: function (renderMaquetteFunction) { + 
for (var i = 0; i < renderFunctions.length; i++) { + if (renderFunctions[i] === renderMaquetteFunction) { + renderFunctions.splice(i, 1); + return projections.splice(i, 1)[0]; + } + } + throw new Error('renderMaquetteFunction was not found'); + } + }; + return projector; + }; +})); diff --git a/plugins/UiConfig/media/js/utils/Animation.coffee b/plugins/UiConfig/media/js/utils/Animation.coffee new file mode 100644 index 000000000..271b88c1f --- /dev/null +++ b/plugins/UiConfig/media/js/utils/Animation.coffee @@ -0,0 +1,138 @@ +class Animation + slideDown: (elem, props) -> + if elem.offsetTop > 2000 + return + + h = elem.offsetHeight + cstyle = window.getComputedStyle(elem) + margin_top = cstyle.marginTop + margin_bottom = cstyle.marginBottom + padding_top = cstyle.paddingTop + padding_bottom = cstyle.paddingBottom + transition = cstyle.transition + + elem.style.boxSizing = "border-box" + elem.style.overflow = "hidden" + elem.style.transform = "scale(0.6)" + elem.style.opacity = "0" + elem.style.height = "0px" + elem.style.marginTop = "0px" + elem.style.marginBottom = "0px" + elem.style.paddingTop = "0px" + elem.style.paddingBottom = "0px" + elem.style.transition = "none" + + setTimeout (-> + elem.className += " animate-inout" + elem.style.height = h+"px" + elem.style.transform = "scale(1)" + elem.style.opacity = "1" + elem.style.marginTop = margin_top + elem.style.marginBottom = margin_bottom + elem.style.paddingTop = padding_top + elem.style.paddingBottom = padding_bottom + ), 1 + + elem.addEventListener "transitionend", -> + elem.classList.remove("animate-inout") + elem.style.transition = elem.style.transform = elem.style.opacity = elem.style.height = null + elem.style.boxSizing = elem.style.marginTop = elem.style.marginBottom = null + elem.style.paddingTop = elem.style.paddingBottom = elem.style.overflow = null + elem.removeEventListener "transitionend", arguments.callee, false + + + slideUp: (elem, remove_func, props) -> + if elem.offsetTop > 1000 + return 
remove_func() + + elem.className += " animate-back" + elem.style.boxSizing = "border-box" + elem.style.height = elem.offsetHeight+"px" + elem.style.overflow = "hidden" + elem.style.transform = "scale(1)" + elem.style.opacity = "1" + elem.style.pointerEvents = "none" + setTimeout (-> + elem.style.height = "0px" + elem.style.marginTop = "0px" + elem.style.marginBottom = "0px" + elem.style.paddingTop = "0px" + elem.style.paddingBottom = "0px" + elem.style.transform = "scale(0.8)" + elem.style.borderTopWidth = "0px" + elem.style.borderBottomWidth = "0px" + elem.style.opacity = "0" + ), 1 + elem.addEventListener "transitionend", (e) -> + if e.propertyName == "opacity" or e.elapsedTime >= 0.6 + elem.removeEventListener "transitionend", arguments.callee, false + remove_func() + + + slideUpInout: (elem, remove_func, props) -> + elem.className += " animate-inout" + elem.style.boxSizing = "border-box" + elem.style.height = elem.offsetHeight+"px" + elem.style.overflow = "hidden" + elem.style.transform = "scale(1)" + elem.style.opacity = "1" + elem.style.pointerEvents = "none" + setTimeout (-> + elem.style.height = "0px" + elem.style.marginTop = "0px" + elem.style.marginBottom = "0px" + elem.style.paddingTop = "0px" + elem.style.paddingBottom = "0px" + elem.style.transform = "scale(0.8)" + elem.style.borderTopWidth = "0px" + elem.style.borderBottomWidth = "0px" + elem.style.opacity = "0" + ), 1 + elem.addEventListener "transitionend", (e) -> + if e.propertyName == "opacity" or e.elapsedTime >= 0.6 + elem.removeEventListener "transitionend", arguments.callee, false + remove_func() + + + showRight: (elem, props) -> + elem.className += " animate" + elem.style.opacity = 0 + elem.style.transform = "TranslateX(-20px) Scale(1.01)" + setTimeout (-> + elem.style.opacity = 1 + elem.style.transform = "TranslateX(0px) Scale(1)" + ), 1 + elem.addEventListener "transitionend", -> + elem.classList.remove("animate") + elem.style.transform = elem.style.opacity = null + + + show: (elem, props) 
-> + delay = arguments[arguments.length-2]?.delay*1000 or 1 + elem.style.opacity = 0 + setTimeout (-> + elem.className += " animate" + ), 1 + setTimeout (-> + elem.style.opacity = 1 + ), delay + elem.addEventListener "transitionend", -> + elem.classList.remove("animate") + elem.style.opacity = null + elem.removeEventListener "transitionend", arguments.callee, false + + hide: (elem, remove_func, props) -> + delay = arguments[arguments.length-2]?.delay*1000 or 1 + elem.className += " animate" + setTimeout (-> + elem.style.opacity = 0 + ), delay + elem.addEventListener "transitionend", (e) -> + if e.propertyName == "opacity" + remove_func() + + addVisibleClass: (elem, props) -> + setTimeout -> + elem.classList.add("visible") + +window.Animation = new Animation() \ No newline at end of file diff --git a/plugins/UiConfig/media/js/utils/Dollar.coffee b/plugins/UiConfig/media/js/utils/Dollar.coffee new file mode 100644 index 000000000..7f19f5515 --- /dev/null +++ b/plugins/UiConfig/media/js/utils/Dollar.coffee @@ -0,0 +1,3 @@ +window.$ = (selector) -> + if selector.startsWith("#") + return document.getElementById(selector.replace("#", "")) diff --git a/plugins/UiConfig/media/js/utils/ZeroFrame.coffee b/plugins/UiConfig/media/js/utils/ZeroFrame.coffee new file mode 100644 index 000000000..11512d161 --- /dev/null +++ b/plugins/UiConfig/media/js/utils/ZeroFrame.coffee @@ -0,0 +1,85 @@ +class ZeroFrame extends Class + constructor: (url) -> + @url = url + @waiting_cb = {} + @wrapper_nonce = document.location.href.replace(/.*wrapper_nonce=([A-Za-z0-9]+).*/, "$1") + @connect() + @next_message_id = 1 + @history_state = {} + @init() + + + init: -> + @ + + + connect: -> + @target = window.parent + window.addEventListener("message", @onMessage, false) + @cmd("innerReady") + + # Save scrollTop + window.addEventListener "beforeunload", (e) => + @log "save scrollTop", window.pageYOffset + @history_state["scrollTop"] = window.pageYOffset + @cmd "wrapperReplaceState", [@history_state, 
null] + + # Restore scrollTop + @cmd "wrapperGetState", [], (state) => + @history_state = state if state? + @log "restore scrollTop", state, window.pageYOffset + if window.pageYOffset == 0 and state + window.scroll(window.pageXOffset, state.scrollTop) + + + onMessage: (e) => + message = e.data + cmd = message.cmd + if cmd == "response" + if @waiting_cb[message.to]? + @waiting_cb[message.to](message.result) + else + @log "Websocket callback not found:", message + else if cmd == "wrapperReady" # Wrapper inited later + @cmd("innerReady") + else if cmd == "ping" + @response message.id, "pong" + else if cmd == "wrapperOpenedWebsocket" + @onOpenWebsocket() + else if cmd == "wrapperClosedWebsocket" + @onCloseWebsocket() + else + @onRequest cmd, message.params + + + onRequest: (cmd, message) => + @log "Unknown request", message + + + response: (to, result) -> + @send {"cmd": "response", "to": to, "result": result} + + + cmd: (cmd, params={}, cb=null) -> + @send {"cmd": cmd, "params": params}, cb + + + send: (message, cb=null) -> + message.wrapper_nonce = @wrapper_nonce + message.id = @next_message_id + @next_message_id += 1 + @target.postMessage(message, "*") + if cb + @waiting_cb[message.id] = cb + + + onOpenWebsocket: => + @log "Websocket open" + + + onCloseWebsocket: => + @log "Websocket close" + + + +window.ZeroFrame = ZeroFrame diff --git a/plugins/Zeroname/README.md b/plugins/Zeroname/README.md new file mode 100644 index 000000000..8a306789b --- /dev/null +++ b/plugins/Zeroname/README.md @@ -0,0 +1,54 @@ +# ZeroName + +Zeroname plugin to connect Namecoin and register all the .bit domain name. + +## Start + +You can create your own Zeroname. + +### Namecoin node + +You need to run a namecoin node. + +[Namecoin](https://namecoin.org/download/) + +You will need to start it as a RPC server. 
+ +Example of `~/.namecoin/namecoin.conf` minimal setup: +``` +daemon=1 +rpcuser=your-name +rpcpassword=your-password +rpcport=8336 +server=1 +txindex=1 +``` + +Don't forget to change the `rpcuser` value and `rpcpassword` value! + +You can start your node : `./namecoind` + +### Create a Zeroname site + +You will also need to create a site `python zeronet.py createSite` and regitser the info. + +In the site you will need to create a file `./data//data/names.json` with this is it: +``` +{} +``` + +### `zeroname_config.json` file + +In `~/.namecoin/zeroname_config.json` +``` +{ + "lastprocessed": 223910, + "zeronet_path": "/root/ZeroNet", # Update with your path + "privatekey": "", # Update with your private key of your site + "site": "" # Update with the address of your site +} +``` + +### Run updater + +You can now run the script : `updater/zeroname_updater.py` and wait until it is fully sync (it might take a while). diff --git a/plugins/Zeroname/SiteManagerPlugin.py b/plugins/Zeroname/SiteManagerPlugin.py index fcf666ae3..40088f12f 100644 --- a/plugins/Zeroname/SiteManagerPlugin.py +++ b/plugins/Zeroname/SiteManagerPlugin.py @@ -1,5 +1,6 @@ import logging import re +import time from Config import config from Plugin import PluginManager @@ -12,22 +13,25 @@ @PluginManager.registerTo("SiteManager") class SiteManagerPlugin(object): site_zeroname = None + db_domains = None + db_domains_modified = None def load(self, *args, **kwargs): super(SiteManagerPlugin, self).load(*args, **kwargs) if not self.get(config.bit_resolver): self.need(config.bit_resolver) # Need ZeroName site - # Checks if its a valid address + # Checks if it's a valid address def isAddress(self, address): - if self.isDomain(address): - return True - else: - return super(SiteManagerPlugin, self).isAddress(address) + return self.isBitDomain(address) or super(SiteManagerPlugin, self).isAddress(address) # Return: True if the address is domain def isDomain(self, address): - return 
re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address) + return self.isBitDomain(address) or super(SiteManagerPlugin, self).isDomain(address) + + # Return: True if the address is .bit domain + def isBitDomain(self, address): + return re.match(r"(.*?)([A-Za-z0-9_-]+\.bit)$", address) # Resolve domain # Return: The address or None @@ -35,27 +39,36 @@ def resolveDomain(self, domain): domain = domain.lower() if not self.site_zeroname: self.site_zeroname = self.need(config.bit_resolver) - self.site_zeroname.needFile("data/names.json", priority=10) - db = self.site_zeroname.storage.loadJson("data/names.json") - return db.get(domain) + + site_zeroname_modified = self.site_zeroname.content_manager.contents.get("content.json", {}).get("modified", 0) + if not self.db_domains or self.db_domains_modified != site_zeroname_modified: + self.site_zeroname.needFile("data/names.json", priority=10) + s = time.time() + self.db_domains = self.site_zeroname.storage.loadJson("data/names.json") + log.debug( + "Domain db with %s entries loaded in %.3fs (modification: %s -> %s)" % + (len(self.db_domains), time.time() - s, self.db_domains_modified, site_zeroname_modified) + ) + self.db_domains_modified = site_zeroname_modified + return self.db_domains.get(domain) # Return or create site and start download site files # Return: Site or None if dns resolve failed - def need(self, address, all_file=True): - if self.isDomain(address): # Its looks like a domain + def need(self, address, *args, **kwargs): + if self.isBitDomain(address): # Its looks like a domain address_resolved = self.resolveDomain(address) if address_resolved: address = address_resolved else: return None - return super(SiteManagerPlugin, self).need(address, all_file) + return super(SiteManagerPlugin, self).need(address, *args, **kwargs) # Return: Site object or None if not found def get(self, address): - if self.sites is None: # Not loaded yet + if not self.loaded: # Not loaded yet self.load() - if self.isDomain(address): # 
Its looks like a domain + if self.isBitDomain(address): # Its looks like a domain address_resolved = self.resolveDomain(address) if address_resolved: # Domain found site = self.sites.get(address_resolved) @@ -67,5 +80,5 @@ def get(self, address): site = self.sites.get(address) else: # Access by site address - site = self.sites.get(address) + site = super(SiteManagerPlugin, self).get(address) return site diff --git a/plugins/Zeroname/UiRequestPlugin.py b/plugins/Zeroname/UiRequestPlugin.py index d1e713625..b0230524a 100644 --- a/plugins/Zeroname/UiRequestPlugin.py +++ b/plugins/Zeroname/UiRequestPlugin.py @@ -12,35 +12,14 @@ def __init__(self, *args, **kwargs): super(UiRequestPlugin, self).__init__(*args, **kwargs) # Media request - def actionSiteMedia(self, path): - match = re.match("/media/(?P
    [A-Za-z0-9-]+\.[A-Za-z0-9\.-]+)(?P/.*|$)", path) + def actionSiteMedia(self, path, **kwargs): + match = re.match(r"/media/(?P
    [A-Za-z0-9-]+\.[A-Za-z0-9\.-]+)(?P/.*|$)", path) if match: # Its a valid domain, resolve first domain = match.group("address") address = self.site_manager.resolveDomain(domain) if address: path = "/media/" + address + match.group("inner_path") - return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output - - # Is mediarequest allowed from that referer - def isMediaRequestAllowed(self, site_address, referer): - referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address - referer_path = re.sub("\?.*", "", referer_path) # Remove http params - - if not re.sub("^http[s]{0,1}://", "", referer).startswith(self.env["HTTP_HOST"]): # Different origin - return False - - if self.isProxyRequest(): # Match to site domain - referer = re.sub("^http://zero[/]+", "http://", referer) # Allow /zero access - referer_site_address = re.match("http[s]{0,1}://(.*?)(/|$)", referer).group(1) - else: # Match to request path - referer_site_address = re.match("/(?P
    [A-Za-z0-9\.-]+)(?P/.*|$)", referer_path).group("address") - - if referer_site_address == site_address: # Referer site address as simple address - return True - elif self.site_manager.resolveDomain(referer_site_address) == site_address: # Referer site address as dns - return True - else: # Invalid referer - return False + return super(UiRequestPlugin, self).actionSiteMedia(path, **kwargs) # Get the wrapper frame output @PluginManager.registerTo("ConfigPlugin") class ConfigPlugin(object): diff --git a/plugins/Zeroname/updater/zeroname_updater.py b/plugins/Zeroname/updater/zeroname_updater.py index 711ee1bc1..a8eedd13a 100644 --- a/plugins/Zeroname/updater/zeroname_updater.py +++ b/plugins/Zeroname/updater/zeroname_updater.py @@ -22,9 +22,33 @@ def processNameOp(domain, value, test=False): except Exception, err: print "Json load error: %s" % err return False - if "zeronet" not in data: - print "No zeronet in ", data.keys() + if "zeronet" not in data and "map" not in data: + # Namecoin standard use {"map": { "blog": {"zeronet": "1D..."} }} + print "No zeronet and no map in ", data.keys() return False + if "map" in data: + # If subdomains using the Namecoin standard is present, just re-write in the Zeronet way + # and call the function again + data_map = data["map"] + new_value = {} + for subdomain in data_map: + if "zeronet" in data_map[subdomain]: + new_value[subdomain] = data_map[subdomain]["zeronet"] + if "zeronet" in data and isinstance(data["zeronet"], basestring): + # { + # "zeronet":"19rXKeKptSdQ9qt7omwN82smehzTuuq6S9", + # .... 
+ # } + new_value[""] = data["zeronet"] + if len(new_value) > 0: + return processNameOp(domain, json.dumps({"zeronet": new_value}), test) + else: + return False + if "zeronet" in data and isinstance(data["zeronet"], basestring): + # { + # "zeronet":"19rXKeKptSdQ9qt7omwN82smehzTuuq6S9" + # } is valid + return processNameOp(domain, json.dumps({"zeronet": { "": data["zeronet"]}}), test) if not isinstance(data["zeronet"], dict): print "Not dict: ", data["zeronet"] return False @@ -145,10 +169,15 @@ def initRpc(config): rpc_auth, rpc_timeout = initRpc(namecoin_location + "namecoin.conf") rpc = AuthServiceProxy(rpc_auth, timeout=rpc_timeout) +node_version = rpc.getnetworkinfo()['version'] + while 1: try: time.sleep(1) - last_block = int(rpc.getinfo()["blocks"]) + if node_version < 160000 : + last_block = int(rpc.getinfo()["blocks"]) + else: + last_block = int(rpc.getblockchaininfo()["blocks"]) break # Connection succeeded except socket.timeout: # Timeout print ".", @@ -168,6 +197,7 @@ def initRpc(config): assert not processBlock(236824, test=True) # Utf8 domain name (invalid should skip) assert not processBlock(236752, test=True) # Uppercase domain (invalid should skip) assert processBlock(236870, test=True) # Encoded domain (should pass) +assert processBlock(438317, test=True) # Testing namecoin standard artifaxradio.bit (should pass) # sys.exit(0) print "- Parsing skipped blocks..." 
@@ -186,7 +216,10 @@ def initRpc(config): while 1: try: time.sleep(1) - rpc.waitforblock() + if node_version < 160000 : + rpc.waitforblock() + else: + rpc.waitfornewblock() print "Found" break # Block found except socket.timeout: # Timeout @@ -197,7 +230,10 @@ def initRpc(config): time.sleep(5) rpc = AuthServiceProxy(rpc_auth, timeout=rpc_timeout) - last_block = int(rpc.getinfo()["blocks"]) + if node_version < 160000 : + last_block = int(rpc.getinfo()["blocks"]) + else: + last_block = int(rpc.getblockchaininfo()["blocks"]) should_publish = False for block_id in range(config["lastprocessed"] + 1, last_block + 1): if processBlock(block_id): diff --git a/plugins/disabled-Bootstrapper/BootstrapperDb.py b/plugins/disabled-Bootstrapper/BootstrapperDb.py index 44cf58c9e..080051508 100644 --- a/plugins/disabled-Bootstrapper/BootstrapperDb.py +++ b/plugins/disabled-Bootstrapper/BootstrapperDb.py @@ -10,7 +10,7 @@ class BootstrapperDb(Db): def __init__(self): - self.version = 6 + self.version = 7 self.hash_ids = {} # hash -> id cache super(BootstrapperDb, self).__init__({"db_name": "Bootstrapper"}, "%s/bootstrapper.db" % config.data_dir) self.foreign_keys = True @@ -20,8 +20,9 @@ def __init__(self): def cleanup(self): while 1: - self.execute("DELETE FROM peer WHERE date_announced < DATETIME('now', '-40 minute')") - time.sleep(4*60) + time.sleep(4 * 60) + timeout = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time() - 60 * 40)) + self.execute("DELETE FROM peer WHERE date_announced < ?", [timeout]) def updateHashCache(self): res = self.execute("SELECT * FROM hash") @@ -46,14 +47,15 @@ def createTables(self): # Create new tables self.execute(""" CREATE TABLE peer ( - peer_id INTEGER PRIMARY KEY ASC AUTOINCREMENT NOT NULL UNIQUE, + peer_id INTEGER PRIMARY KEY ASC AUTOINCREMENT NOT NULL UNIQUE, + type TEXT, + address TEXT, port INTEGER NOT NULL, - ip4 TEXT, - onion TEXT, date_added DATETIME DEFAULT (CURRENT_TIMESTAMP), date_announced DATETIME DEFAULT (CURRENT_TIMESTAMP) 
); """) + self.execute("CREATE UNIQUE INDEX peer_key ON peer (address, port);") self.execute(""" CREATE TABLE peer_to_hash ( @@ -81,29 +83,24 @@ def getHashId(self, hash): self.hash_ids[hash] = self.cur.cursor.lastrowid return self.hash_ids[hash] - def peerAnnounce(self, ip4=None, onion=None, port=None, hashes=[], onion_signed=False, delete_missing_hashes=False): + def peerAnnounce(self, ip_type, address, port=None, hashes=[], onion_signed=False, delete_missing_hashes=False): hashes_ids_announced = [] for hash in hashes: hashes_ids_announced.append(self.getHashId(hash)) - if not ip4 and not onion: - return 0 - # Check user - if onion: - res = self.execute("SELECT * FROM peer WHERE ? LIMIT 1", {"onion": onion}) - else: - res = self.execute("SELECT * FROM peer WHERE ? LIMIT 1", {"ip4": ip4, "port": port}) + res = self.execute("SELECT peer_id FROM peer WHERE ? LIMIT 1", {"address": address, "port": port}) user_row = res.fetchone() + now = time.strftime("%Y-%m-%d %H:%M:%S") if user_row: peer_id = user_row["peer_id"] - self.execute("UPDATE peer SET date_announced = DATETIME('now') WHERE ?", {"peer_id": peer_id}) + self.execute("UPDATE peer SET date_announced = ? 
WHERE peer_id = ?", (now, peer_id)) else: - self.log.debug("New peer: %s %s signed: %s" % (ip4, onion, onion_signed)) - if onion and not onion_signed: + self.log.debug("New peer: %s signed: %s" % (address, onion_signed)) + if ip_type == "onion" and not onion_signed: return len(hashes) - self.execute("INSERT INTO peer ?", {"ip4": ip4, "onion": onion, "port": port}) + self.execute("INSERT INTO peer ?", {"type": ip_type, "address": address, "port": port, "date_announced": now}) peer_id = self.cur.cursor.lastrowid # Check user's hashes @@ -112,7 +109,7 @@ def peerAnnounce(self, ip4=None, onion=None, port=None, hashes=[], onion_signed= if hash_ids_db != hashes_ids_announced: hash_ids_added = set(hashes_ids_announced) - set(hash_ids_db) hash_ids_removed = set(hash_ids_db) - set(hashes_ids_announced) - if not onion or onion_signed: + if ip_type != "onion" or onion_signed: for hash_id in hash_ids_added: self.execute("INSERT INTO peer_to_hash ?", {"peer_id": peer_id, "hash_id": hash_id}) if hash_ids_removed and delete_missing_hashes: @@ -122,37 +119,38 @@ def peerAnnounce(self, ip4=None, onion=None, port=None, hashes=[], onion_signed= else: return 0 - def peerList(self, hash, ip4=None, onions=[], port=None, limit=30, need_types=["ip4", "onion"]): - hash_peers = {"ip4": [], "onion": []} + def peerList(self, hash, address=None, onions=[], port=None, limit=30, need_types=["ipv4", "onion"], order=True): + back = {"ipv4": [], "ipv6": [], "onion": []} if limit == 0: - return hash_peers + return back hashid = self.getHashId(hash) - where = "hash_id = :hashid" + if order: + order_sql = "ORDER BY date_announced DESC" + else: + order_sql = "" + where_sql = "hash_id = :hashid" if onions: - onions_escaped = ["'%s'" % re.sub("[^a-z0-9,]", "", onion) for onion in onions] - where += " AND (onion NOT IN (%s) OR onion IS NULL)" % ",".join(onions_escaped) - elif ip4: - where += " AND (NOT (ip4 = :ip4 AND port = :port) OR ip4 IS NULL)" + onions_escaped = ["'%s'" % re.sub("[^a-z0-9,]", "", 
onion) for onion in onions if type(onion) is str] + where_sql += " AND address NOT IN (%s)" % ",".join(onions_escaped) + elif address: + where_sql += " AND NOT (address = :address AND port = :port)" query = """ - SELECT ip4, port, onion + SELECT type, address, port FROM peer_to_hash LEFT JOIN peer USING (peer_id) WHERE %s - ORDER BY date_announced DESC + %s LIMIT :limit - """ % where - res = self.execute(query, {"hashid": hashid, "ip4": ip4, "onions": onions, "port": port, "limit": limit}) + """ % (where_sql, order_sql) + res = self.execute(query, {"hashid": hashid, "address": address, "port": port, "limit": limit}) for row in res: - if row["ip4"] and "ip4" in need_types: - hash_peers["ip4"].append( - helper.packAddress(row["ip4"], row["port"]) - ) - if row["onion"] and "onion" in need_types: - hash_peers["onion"].append( - helper.packOnionAddress(row["onion"], row["port"]) - ) - - return hash_peers + if row["type"] in need_types: + if row["type"] == "onion": + packed = helper.packOnionAddress(row["address"], row["port"]) + else: + packed = helper.packAddress(str(row["address"]), row["port"]) + back[row["type"]].append(packed) + return back diff --git a/plugins/disabled-Bootstrapper/BootstrapperPlugin.py b/plugins/disabled-Bootstrapper/BootstrapperPlugin.py index 7d4360c21..ba6d1e235 100644 --- a/plugins/disabled-Bootstrapper/BootstrapperPlugin.py +++ b/plugins/disabled-Bootstrapper/BootstrapperPlugin.py @@ -1,80 +1,132 @@ import time +from util import helper + from Plugin import PluginManager from BootstrapperDb import BootstrapperDb from Crypt import CryptRsa +from Config import config -if "db" not in locals().keys(): # Share durin reloads +if "db" not in locals().keys(): # Share during reloads db = BootstrapperDb() @PluginManager.registerTo("FileRequest") class FileRequestPlugin(object): - def actionAnnounce(self, params): - hashes = params["hashes"] + def checkOnionSigns(self, onions, onion_signs, onion_sign_this): + if not onion_signs or len(onion_signs) != 
len(set(onions)): + return False - if "onion_signs" in params and len(params["onion_signs"]) == len(hashes): - # Check if all sign is correct - if time.time() - float(params["onion_sign_this"]) < 3*60: # Peer has 3 minute to sign the message - onions_signed = [] - # Check onion signs - for onion_publickey, onion_sign in params["onion_signs"].items(): - if CryptRsa.verify(params["onion_sign_this"], onion_publickey, onion_sign): - onions_signed.append(CryptRsa.publickeyToOnion(onion_publickey)) - else: - break - # Check if the same onion addresses signed as the announced onces - if sorted(onions_signed) == sorted(params["onions"]): - all_onions_signed = True - else: - all_onions_signed = False + if time.time() - float(onion_sign_this) > 3 * 60: + return False # Signed out of allowed 3 minutes + + onions_signed = [] + # Check onion signs + for onion_publickey, onion_sign in onion_signs.items(): + if CryptRsa.verify(onion_sign_this, onion_publickey, onion_sign): + onions_signed.append(CryptRsa.publickeyToOnion(onion_publickey)) else: - # Onion sign this out of 3 minute - all_onions_signed = False + break + + # Check if the same onion addresses signed as the announced onces + if sorted(onions_signed) == sorted(set(onions)): + return True else: - # Incorrect signs number - all_onions_signed = False + return False + + def actionAnnounce(self, params): + time_started = time.time() + s = time.time() + # Backward compatibility + if "ip4" in params["add"]: + params["add"].append("ipv4") + if "ip4" in params["need_types"]: + params["need_types"].append("ipv4") + + hashes = params["hashes"] + + all_onions_signed = self.checkOnionSigns(params.get("onions", []), params.get("onion_signs"), params.get("onion_sign_this")) + + time_onion_check = time.time() - s - if "ip4" in params["add"] and self.connection.ip != "127.0.0.1" and not self.connection.ip.endswith(".onion"): - ip4 = self.connection.ip + ip_type = helper.getIpType(self.connection.ip) + + if ip_type == "onion" or 
self.connection.ip in config.ip_local: + is_port_open = False + elif ip_type in params["add"]: + is_port_open = True else: - ip4 = None + is_port_open = False + s = time.time() # Separatley add onions to sites or at once if no onions present - hashes_changed = 0 i = 0 + onion_to_hash = {} for onion in params.get("onions", []): + if onion not in onion_to_hash: + onion_to_hash[onion] = [] + onion_to_hash[onion].append(hashes[i]) + i += 1 + + hashes_changed = 0 + db.execute("BEGIN") + for onion, onion_hashes in onion_to_hash.iteritems(): hashes_changed += db.peerAnnounce( - onion=onion, + ip_type="onion", + address=onion, port=params["port"], - hashes=[hashes[i]], + hashes=onion_hashes, onion_signed=all_onions_signed ) - i += 1 - # Announce all sites if ip4 defined - if ip4: + db.execute("END") + time_db_onion = time.time() - s + + s = time.time() + + if is_port_open: hashes_changed += db.peerAnnounce( - ip4=ip4, + ip_type=ip_type, + address=self.connection.ip, port=params["port"], hashes=hashes, delete_missing_hashes=params.get("delete") ) + time_db_ip = time.time() - s + s = time.time() # Query sites back = {} peers = [] if params.get("onions") and not all_onions_signed and hashes_changed: back["onion_sign_this"] = "%.0f" % time.time() # Send back nonce for signing + if len(hashes) > 500 or not hashes_changed: + limit = 5 + order = False + else: + limit = 30 + order = True for hash in hashes: + if time.time() - time_started > 1: # 1 sec limit on request + self.connection.log("Announce time limit exceeded after %s/%s sites" % (len(peers), len(hashes))) + break + hash_peers = db.peerList( hash, - ip4=self.connection.ip, onions=params.get("onions"), port=params["port"], - limit=min(30, params["need_num"]), need_types=params["need_types"] + address=self.connection.ip, onions=onion_to_hash.keys(), port=params["port"], + limit=min(limit, params["need_num"]), need_types=params["need_types"], order=order ) + if "ip4" in params["need_types"]: # Backward compatibility + 
hash_peers["ip4"] = hash_peers["ipv4"] + del(hash_peers["ipv4"]) peers.append(hash_peers) + time_peerlist = time.time() - s back["peers"] = peers + self.connection.log( + "Announce %s sites (onions: %s, onion_check: %.3fs, db_onion: %.3fs, db_ip: %.3fs, peerlist: %.3fs, limit: %s)" % + (len(hashes), len(onion_to_hash), time_onion_check, time_db_onion, time_db_ip, time_peerlist, limit) + ) self.response(back) diff --git a/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py b/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py index e49bfd3e8..d99f8ea7f 100644 --- a/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py +++ b/plugins/disabled-Bootstrapper/Test/TestBootstrapper.py @@ -27,8 +27,9 @@ def cleanup(): @pytest.mark.usefixtures("resetSettings") class TestBootstrapper: - def testIp4(self, file_server, bootstrapper_db): - peer = Peer("127.0.0.1", 1544, connection_server=file_server) + def testBootstrapperDb(self, file_server, bootstrapper_db): + ip_type = helper.getIpType(file_server.ip) + peer = Peer(file_server.ip, 1544, connection_server=file_server) hash1 = hashlib.sha256("site1").digest() hash2 = hashlib.sha256("site2").digest() hash3 = hashlib.sha256("site3").digest() @@ -36,84 +37,85 @@ def testIp4(self, file_server, bootstrapper_db): # Verify empty result res = peer.request("announce", { "hashes": [hash1, hash2], - "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": ["ip4"] + "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [ip_type] }) - assert len(res["peers"][0]["ip4"]) == 0 # Empty result + assert len(res["peers"][0][ip_type]) == 0 # Empty result # Verify added peer on previous request - bootstrapper_db.peerAnnounce(ip4="1.2.3.4", port=15441, hashes=[hash1, hash2], delete_missing_hashes=True) + bootstrapper_db.peerAnnounce(ip_type, file_server.ip_external, port=15441, hashes=[hash1, hash2], delete_missing_hashes=True) res = peer.request("announce", { "hashes": [hash1, hash2], - "port": 15441, "need_types": ["ip4"], 
"need_num": 10, "add": ["ip4"] + "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [ip_type] }) - assert len(res["peers"][0]["ip4"]) == 1 - assert len(res["peers"][1]["ip4"]) == 1 + assert len(res["peers"][0][ip_type]) == 1 + assert len(res["peers"][1][ip_type]) == 1 # hash2 deleted from 1.2.3.4 - bootstrapper_db.peerAnnounce(ip4="1.2.3.4", port=15441, hashes=[hash1], delete_missing_hashes=True) + bootstrapper_db.peerAnnounce(ip_type, file_server.ip_external, port=15441, hashes=[hash1], delete_missing_hashes=True) res = peer.request("announce", { "hashes": [hash1, hash2], - "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": ["ip4"] + "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [ip_type] }) - assert len(res["peers"][0]["ip4"]) == 1 - assert len(res["peers"][1]["ip4"]) == 0 + assert len(res["peers"][0][ip_type]) == 1 + assert len(res["peers"][1][ip_type]) == 0 # Announce 3 hash again - bootstrapper_db.peerAnnounce(ip4="1.2.3.4", port=15441, hashes=[hash1, hash2, hash3], delete_missing_hashes=True) + bootstrapper_db.peerAnnounce(ip_type, file_server.ip_external, port=15441, hashes=[hash1, hash2, hash3], delete_missing_hashes=True) res = peer.request("announce", { "hashes": [hash1, hash2, hash3], - "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": ["ip4"] + "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [ip_type] }) - assert len(res["peers"][0]["ip4"]) == 1 - assert len(res["peers"][1]["ip4"]) == 1 - assert len(res["peers"][2]["ip4"]) == 1 + assert len(res["peers"][0][ip_type]) == 1 + assert len(res["peers"][1][ip_type]) == 1 + assert len(res["peers"][2][ip_type]) == 1 # Single hash announce res = peer.request("announce", { - "hashes": [hash1], "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": ["ip4"] + "hashes": [hash1], "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [ip_type] }) - assert len(res["peers"][0]["ip4"]) == 1 + assert len(res["peers"][0][ip_type]) == 1 # Test DB 
cleanup - assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM peer").fetchone()["num"] == 1 # 127.0.0.1 never get added to db + assert map(lambda row: row[0], bootstrapper_db.execute("SELECT address FROM peer").fetchall()) == [file_server.ip_external] # 127.0.0.1 never get added to db # Delete peers - bootstrapper_db.execute("DELETE FROM peer WHERE ip4 = '1.2.3.4'") + bootstrapper_db.execute("DELETE FROM peer WHERE address = ?", [file_server.ip_external]) assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM peer_to_hash").fetchone()["num"] == 0 assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM hash").fetchone()["num"] == 3 # 3 sites assert bootstrapper_db.execute("SELECT COUNT(*) AS num FROM peer").fetchone()["num"] == 0 # 0 peer def testPassive(self, file_server, bootstrapper_db): - peer = Peer("127.0.0.1", 1544, connection_server=file_server) + peer = Peer(file_server.ip, 1544, connection_server=file_server) + ip_type = helper.getIpType(file_server.ip) hash1 = hashlib.sha256("hash1").digest() - bootstrapper_db.peerAnnounce(ip4=None, port=15441, hashes=[hash1]) + bootstrapper_db.peerAnnounce(ip_type, address=None, port=15441, hashes=[hash1]) res = peer.request("announce", { - "hashes": [hash1], "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": [] + "hashes": [hash1], "port": 15441, "need_types": [ip_type], "need_num": 10, "add": [] }) - assert len(res["peers"][0]["ip4"]) == 0 # Empty result + assert len(res["peers"][0]["ipv4"]) == 0 # Empty result def testAddOnion(self, file_server, site, bootstrapper_db, tor_manager): onion1 = tor_manager.addOnion() onion2 = tor_manager.addOnion() - peer = Peer("127.0.0.1", 1544, connection_server=file_server) + peer = Peer(file_server.ip, 1544, connection_server=file_server) hash1 = hashlib.sha256("site1").digest() hash2 = hashlib.sha256("site2").digest() + hash3 = hashlib.sha256("site3").digest() - bootstrapper_db.peerAnnounce(ip4="1.2.3.4", port=1234, hashes=[hash1, hash2]) + 
bootstrapper_db.peerAnnounce(ip_type="ipv4", address="1.2.3.4", port=1234, hashes=[hash1, hash2, hash3]) res = peer.request("announce", { - "onions": [onion1, onion2], - "hashes": [hash1, hash2], "port": 15441, "need_types": ["ip4", "onion"], "need_num": 10, "add": ["onion"] + "onions": [onion1, onion1, onion2], + "hashes": [hash1, hash2, hash3], "port": 15441, "need_types": ["ipv4", "onion"], "need_num": 10, "add": ["onion"] }) - assert len(res["peers"][0]["ip4"]) == 1 - assert "onion_sign_this" in res + assert len(res["peers"][0]["ipv4"]) == 1 # Onion address not added yet - site_peers = bootstrapper_db.peerList(ip4="1.2.3.4", port=1234, hash=hash1) + site_peers = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash1) assert len(site_peers["onion"]) == 0 assert "onion_sign_this" in res @@ -125,55 +127,98 @@ def testAddOnion(self, file_server, site, bootstrapper_db, tor_manager): res = peer.request("announce", { "onions": [onion1], "onion_sign_this": res["onion_sign_this"], "onion_signs": {tor_manager.getPublickey(onion2): sign2}, - "hashes": [hash1], "port": 15441, "need_types": ["ip4", "onion"], "need_num": 10, "add": ["onion"] + "hashes": [hash1], "port": 15441, "need_types": ["ipv4", "onion"], "need_num": 10, "add": ["onion"] }) assert "onion_sign_this" in res - site_peers1 = bootstrapper_db.peerList(ip4="1.2.3.4", port=1234, hash=hash1) + site_peers1 = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash1) assert len(site_peers1["onion"]) == 0 # Not added # Bad sign (missing one) res = peer.request("announce", { - "onions": [onion1, onion2], "onion_sign_this": res["onion_sign_this"], + "onions": [onion1, onion1, onion2], "onion_sign_this": res["onion_sign_this"], "onion_signs": {tor_manager.getPublickey(onion1): sign1}, - "hashes": [hash1, hash2], "port": 15441, "need_types": ["ip4", "onion"], "need_num": 10, "add": ["onion"] + "hashes": [hash1, hash2, hash3], "port": 15441, "need_types": ["ipv4", "onion"], "need_num": 10, "add": 
["onion"] }) assert "onion_sign_this" in res - site_peers1 = bootstrapper_db.peerList(ip4="1.2.3.4", port=1234, hash=hash1) + site_peers1 = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash1) assert len(site_peers1["onion"]) == 0 # Not added # Good sign res = peer.request("announce", { - "onions": [onion1, onion2], "onion_sign_this": res["onion_sign_this"], + "onions": [onion1, onion1, onion2], "onion_sign_this": res["onion_sign_this"], "onion_signs": {tor_manager.getPublickey(onion1): sign1, tor_manager.getPublickey(onion2): sign2}, - "hashes": [hash1, hash2], "port": 15441, "need_types": ["ip4", "onion"], "need_num": 10, "add": ["onion"] + "hashes": [hash1, hash2, hash3], "port": 15441, "need_types": ["ipv4", "onion"], "need_num": 10, "add": ["onion"] }) assert "onion_sign_this" not in res # Onion addresses added - site_peers1 = bootstrapper_db.peerList(ip4="1.2.3.4", port=1234, hash=hash1) + site_peers1 = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash1) assert len(site_peers1["onion"]) == 1 - site_peers2 = bootstrapper_db.peerList(ip4="1.2.3.4", port=1234, hash=hash2) + site_peers2 = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash2) assert len(site_peers2["onion"]) == 1 + site_peers3 = bootstrapper_db.peerList(address="1.2.3.4", port=1234, hash=hash3) + assert len(site_peers3["onion"]) == 1 - assert site_peers1["onion"][0] != site_peers2["onion"][0] - assert helper.unpackOnionAddress(site_peers1["onion"][0])[0] == onion1+".onion" - assert helper.unpackOnionAddress(site_peers2["onion"][0])[0] == onion2+".onion" + assert site_peers1["onion"][0] == site_peers2["onion"][0] + assert site_peers2["onion"][0] != site_peers3["onion"][0] + assert helper.unpackOnionAddress(site_peers1["onion"][0])[0] == onion1 + ".onion" + assert helper.unpackOnionAddress(site_peers2["onion"][0])[0] == onion1 + ".onion" + assert helper.unpackOnionAddress(site_peers3["onion"][0])[0] == onion2 + ".onion" tor_manager.delOnion(onion1) 
tor_manager.delOnion(onion2) def testRequestPeers(self, file_server, site, bootstrapper_db, tor_manager): site.connection_server = file_server + file_server.tor_manager = tor_manager hash = hashlib.sha256(site.address).digest() # Request peers from tracker assert len(site.peers) == 0 - bootstrapper_db.peerAnnounce(ip4="1.2.3.4", port=1234, hashes=[hash]) - site.announceTracker("zero", "127.0.0.1:1544") + bootstrapper_db.peerAnnounce(ip_type="ipv4", address="1.2.3.4", port=1234, hashes=[hash]) + site.announcer.announceTracker("zero://%s:%s" % (file_server.ip, file_server.port)) assert len(site.peers) == 1 # Test onion address store - bootstrapper_db.peerAnnounce(onion="bka4ht2bzxchy44r", port=1234, hashes=[hash], onion_signed=True) - site.announceTracker("zero", "127.0.0.1:1544") + bootstrapper_db.peerAnnounce(ip_type="onion", address="bka4ht2bzxchy44r", port=1234, hashes=[hash], onion_signed=True) + site.announcer.announceTracker("zero://%s:%s" % (file_server.ip, file_server.port)) assert len(site.peers) == 2 assert "bka4ht2bzxchy44r.onion:1234" in site.peers + + @pytest.mark.slow + def testAnnounce(self, file_server, tor_manager): + file_server.tor_manager = tor_manager + hash1 = hashlib.sha256("1Nekos4fiBqfcazyG1bAxdBT5oBvA76Z").digest() + hash2 = hashlib.sha256("1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr").digest() + peer = Peer("zero.booth.moe", 443, connection_server=file_server) + assert peer.request("ping") + peer = Peer("boot3rdez4rzn36x.onion", 15441, connection_server=file_server) + assert peer.request("ping") + res = peer.request("announce", { + "hashes": [hash1, hash2], + "port": 15441, "need_types": ["ip4", "onion"], "need_num": 100, "add": [""] + }) + + assert res + + def testBackwardCompatibility(self, file_server, bootstrapper_db): + peer = Peer(file_server.ip, 1544, connection_server=file_server) + hash1 = hashlib.sha256("site1").digest() + + bootstrapper_db.peerAnnounce("ipv4", file_server.ip_external, port=15441, hashes=[hash1], 
delete_missing_hashes=True) + + # Test with ipv4 need type + res = peer.request("announce", { + "hashes": [hash1], + "port": 15441, "need_types": ["ipv4"], "need_num": 10, "add": [] + }) + + assert len(res["peers"][0]["ipv4"]) == 1 + + # Test with ip4 need type + res = peer.request("announce", { + "hashes": [hash1], + "port": 15441, "need_types": ["ip4"], "need_num": 10, "add": [] + }) + + assert len(res["peers"][0]["ip4"]) == 1 diff --git a/plugins/disabled-Dnschain/SiteManagerPlugin.py b/plugins/disabled-Dnschain/SiteManagerPlugin.py index 9121b4259..a5122ec1e 100644 --- a/plugins/disabled-Dnschain/SiteManagerPlugin.py +++ b/plugins/disabled-Dnschain/SiteManagerPlugin.py @@ -24,7 +24,7 @@ def isAddress(self, address): # Return: True if the address is domain def isDomain(self, address): - return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address) + return re.match(r"(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address) # Load dns entries from data/dns_cache.json @@ -60,7 +60,7 @@ def resolveDomainDnschainNet(self, domain): return data["zeronet"].get(sub_domain) # Not found return address - except Exception, err: + except Exception as err: log.debug("Dnschain.net %s resolve error: %s" % (domain, Debug.formatException(err))) @@ -74,7 +74,7 @@ def resolveDomainDnschainInfo(self, domain): if not sub_domain: sub_domain = "@" address = None with gevent.Timeout(5, Exception("Timeout: 5s")): - res = Http.get("https://dnschain.info/bit/d/%s" % re.sub("\.bit$", "", top_domain)).read() + res = Http.get("https://dnschain.info/bit/d/%s" % re.sub(r"\.bit$", "", top_domain)).read() data = json.loads(res)["value"] for key, val in data["zeronet"].iteritems(): self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours @@ -82,7 +82,7 @@ def resolveDomainDnschainInfo(self, domain): return data["zeronet"].get(sub_domain) # Not found return address - except Exception, err: + except Exception as err: log.debug("Dnschain.info %s resolve error: %s" % (domain, 
Debug.formatException(err))) diff --git a/plugins/disabled-Dnschain/UiRequestPlugin.py b/plugins/disabled-Dnschain/UiRequestPlugin.py index e1a095a58..8ab9d5c58 100644 --- a/plugins/disabled-Dnschain/UiRequestPlugin.py +++ b/plugins/disabled-Dnschain/UiRequestPlugin.py @@ -11,7 +11,7 @@ def __init__(self, server = None): # Media request def actionSiteMedia(self, path): - match = re.match("/media/(?P
    [A-Za-z0-9-]+\.[A-Za-z0-9\.-]+)(?P/.*|$)", path) + match = re.match(r"/media/(?P
    [A-Za-z0-9-]+\.[A-Za-z0-9\.-]+)(?P/.*|$)", path) if match: # Its a valid domain, resolve first domain = match.group("address") address = self.site_manager.resolveDomain(domain) @@ -23,7 +23,7 @@ def actionSiteMedia(self, path): # Is mediarequest allowed from that referer def isMediaRequestAllowed(self, site_address, referer): referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address - referer_site_address = re.match("/(?P
    [A-Za-z0-9\.-]+)(?P/.*|$)", referer_path).group("address") + referer_site_address = re.match(r"/(?P
    [A-Za-z0-9\.-]+)(?P/.*|$)", referer_path).group("address") if referer_site_address == site_address: # Referer site address as simple address return True diff --git a/plugins/disabled-DonationMessage/DonationMessagePlugin.py b/plugins/disabled-DonationMessage/DonationMessagePlugin.py index 14f95b081..8cf0d5416 100644 --- a/plugins/disabled-DonationMessage/DonationMessagePlugin.py +++ b/plugins/disabled-DonationMessage/DonationMessagePlugin.py @@ -19,4 +19,4 @@ def renderWrapper(self, *args, **kwargs): """ - return re.sub("\s*\s*$", inject_html, body) + return re.sub(r"\s*\s*$", inject_html, body) diff --git a/plugins/disabled-Multiuser/MultiuserPlugin.py b/plugins/disabled-Multiuser/MultiuserPlugin.py index 4698b1594..e3e4b54cc 100644 --- a/plugins/disabled-Multiuser/MultiuserPlugin.py +++ b/plugins/disabled-Multiuser/MultiuserPlugin.py @@ -1,11 +1,17 @@ import re import sys +import json from Config import config from Plugin import PluginManager from Crypt import CryptBitcoin import UserPlugin +try: + local_master_addresses = set(json.load(open("%s/users.json" % config.data_dir)).keys()) # Users in users.json +except Exception, err: + local_master_addresses = set() + @PluginManager.registerTo("UiRequest") class UiRequestPlugin(object): @@ -20,6 +26,7 @@ def actionWrapper(self, path, extra_headers=None): match = re.match("/(?P
    [A-Za-z0-9\._-]+)(?P/.*|$)", path) if not match: return False + inner_path = match.group("inner_path").lstrip("/") html_request = "." not in inner_path or inner_path.endswith(".html") # Only inject html to html requests @@ -29,11 +36,20 @@ def actionWrapper(self, path, extra_headers=None): if not user: # No user found by cookie user = self.user_manager.create() user_created = True + else: + user = None + + # Disable new site creation if --multiuser_no_new_sites enabled + if config.multiuser_no_new_sites: + path_parts = self.parsePath(path) + if not self.server.site_manager.get(match.group("address")) and (not user or user.master_address not in local_master_addresses): + self.sendHeader(404) + return self.formatError("Not Found", "Adding new sites disabled on this proxy", details=False) if user_created: if not extra_headers: - extra_headers = [] - extra_headers.append(('Set-Cookie', "master_address=%s;path=/;max-age=2592000;" % user.master_address)) # = 30 days + extra_headers = {} + extra_headers['Set-Cookie'] = "master_address=%s;path=/;max-age=2592000;" % user.master_address # = 30 days loggedin = self.get.get("login") == "done" @@ -42,45 +58,24 @@ def actionWrapper(self, path, extra_headers=None): if not back_generator: # Wrapper error or not string returned, injection not possible return False - if user_created: - back = back_generator.next() - master_seed = user.master_seed - # Inject the welcome message - inject_html = """ - - - - - - """.replace("\t", "") - inject_html = inject_html.replace("{master_seed}", master_seed) # Set the master seed in the message - - return iter([re.sub("\s*\s*$", inject_html, back)]) # Replace the tags with the injection - elif loggedin: back = back_generator.next() inject_html = """ - """.replace("\t", "") + if user.master_address in local_master_addresses: + message = "Hello master!" + else: + message = "Hello again!" 
+ inject_html = inject_html.replace("{message}", message) + inject_html = inject_html.replace("{script_nonce}", self.getScriptNonce()) return iter([re.sub("\s*\s*$", inject_html, back)]) # Replace the tags with the injection else: # No injection necessary @@ -99,6 +94,19 @@ def getCurrentUser(self): @PluginManager.registerTo("UiWebsocket") class UiWebsocketPlugin(object): + def __init__(self, *args, **kwargs): + self.multiuser_denied_cmds = ( + "sitePause", "siteResume", "siteDelete", "configSet", "serverShutdown", "serverUpdate", "siteClone", + "siteSetOwned", "siteSetAutodownloadoptional", "dbReload", "dbRebuild", + "mergerSiteDelete", "siteSetLimit", "siteSetAutodownloadBigfileLimit", + "optionalLimitSet", "optionalHelp", "optionalHelpRemove", "optionalHelpAll", "optionalFilePin", "optionalFileUnpin", "optionalFileDelete", + "muteAdd", "muteRemove", "siteblockAdd", "siteblockRemove", "filterIncludeAdd", "filterIncludeRemove" + ) + if config.multiuser_no_new_sites: + self.multiuser_denied_cmds += ("mergerSiteAdd", ) + + super(UiWebsocketPlugin, self).__init__(*args, **kwargs) + # Let the page know we running in multiuser mode def formatServerInfo(self): server_info = super(UiWebsocketPlugin, self).formatServerInfo() @@ -120,9 +128,12 @@ def actionUserShowMasterSeed(self, to): def actionUserLogout(self, to): if "ADMIN" not in self.site.settings["permissions"]: return self.response(to, "Logout not allowed") - message = "You have been logged out. Login to another account" - message += "" + message = "You have been logged out. 
Login to another account" self.cmd("notification", ["done", message, 1000000]) # 1000000 = Show ~forever :) + + script = "document.cookie = 'master_address=; expires=Thu, 01 Jan 1970 00:00:00 UTC; path=/';" + script += "$('#button_notification').on('click', function() { zeroframe.cmd(\"userLoginForm\", []); });" + self.cmd("injectScript", script) # Delete from user_manager user_manager = sys.modules["User.UserManager"].user_manager if self.user.master_address in user_manager.users: @@ -143,44 +154,67 @@ def responseUserLogin(self, master_seed): if not user: user = user_manager.create(master_seed=master_seed) if user.master_address: - message = "Successfull login, reloading page..." - message += "" % user.master_address - message += "" - self.cmd("notification", ["done", message]) + script = "document.cookie = 'master_address=%s;path=/;max-age=2592000;';" % user.master_address + script += "zeroframe.cmd('wrapperReload', ['login=done']);" + self.cmd("notification", ["done", "Successful login, reloading page..."]) + self.cmd("injectScript", script) else: self.cmd("notification", ["error", "Error: Invalid master seed"]) self.actionUserLoginForm(0) - # Disable not Multiuser safe functions - def actionSiteDelete(self, to, *args, **kwargs): - if not config.multiuser_local: - self.cmd("notification", ["info", "This function is disabled on this proxy"]) - else: - return super(UiWebsocketPlugin, self).actionSiteDelete(to, *args, **kwargs) - - def actionConfigSet(self, to, *args, **kwargs): - if not config.multiuser_local: - self.cmd("notification", ["info", "This function is disabled on this proxy"]) - else: - return super(UiWebsocketPlugin, self).actionConfigSet(to, *args, **kwargs) - - def actionServerShutdown(self, to, *args, **kwargs): - if not config.multiuser_local: - self.cmd("notification", ["info", "This function is disabled on this proxy"]) + def hasCmdPermission(self, cmd): + cmd = cmd[0].lower() + cmd[1:] + if not config.multiuser_local and 
self.user.master_address not in local_master_addresses and cmd in self.multiuser_denied_cmds: + self.cmd("notification", ["info", "This function is disabled on this proxy!"]) + return False else: - return super(UiWebsocketPlugin, self).actionServerShutdown(to, *args, **kwargs) + return super(UiWebsocketPlugin, self).hasCmdPermission(cmd) + + def actionCertAdd(self, *args, **kwargs): + super(UiWebsocketPlugin, self).actionCertAdd(*args, **kwargs) + master_seed = self.user.master_seed + message = """ + + Hello, welcome to ZeroProxy!
    A new, unique account created for you:
    + + +
    + This is your private key, save it, so you can login next time.
    + Warning: Without this key, your account will be lost forever! +

    + Ok, Saved it!

    + This site allows you to browse ZeroNet content, but if you want to secure your account
    + and help to keep the network alive, then please run your own ZeroNet client.
    + """ - def actionServerUpdate(self, to, *args, **kwargs): - if not config.multiuser_local: - self.cmd("notification", ["info", "This function is disabled on this proxy"]) - else: - return super(UiWebsocketPlugin, self).actionServerUpdate(to, *args, **kwargs) + self.cmd("notification", ["info", message]) - def actionSiteClone(self, to, *args, **kwargs): - if not config.multiuser_local: - self.cmd("notification", ["info", "This function is disabled on this proxy"]) + script = """ + $("#button_notification_masterseed").on("click", function() { + this.value = "{master_seed}"; this.setSelectionRange(0,100); + }) + $("#button_notification_download").on("mousedown", function() { + this.href = window.URL.createObjectURL(new Blob(["ZeroNet user master seed:\\r\\n{master_seed}"])) + }) + """.replace("{master_seed}", master_seed) + self.cmd("injectScript", script) + + + def actionPermissionAdd(self, to, permission): + if permission == "NOSANDBOX": + self.cmd("notification", ["info", "You can't disable sandbox on this proxy!"]) + self.response(to, {"error": "Denied by proxy"}) + return False else: - return super(UiWebsocketPlugin, self).actionSiteClone(to, *args, **kwargs) + return super(UiWebsocketPlugin, self).actionPermissionAdd(to, permission) @PluginManager.registerTo("ConfigPlugin") @@ -188,5 +222,6 @@ class ConfigPlugin(object): def createArguments(self): group = self.parser.add_argument_group("Multiuser plugin") group.add_argument('--multiuser_local', help="Enable unsafe Ui functions and write users to disk", action='store_true') + group.add_argument('--multiuser_no_new_sites', help="Denies adding new sites by normal users", action='store_true') return super(ConfigPlugin, self).createArguments() diff --git a/plugins/disabled-StemPort/StemPortPlugin.py b/plugins/disabled-StemPort/StemPortPlugin.py new file mode 100644 index 000000000..3a3787c7b --- /dev/null +++ b/plugins/disabled-StemPort/StemPortPlugin.py @@ -0,0 +1,135 @@ +import logging +import traceback + 
+import socket +import stem +from stem import Signal +from stem.control import Controller +from stem.socket import ControlPort + +from Plugin import PluginManager +from Config import config +from Debug import Debug + +if config.tor != "disable": + from gevent import monkey + monkey.patch_time() + monkey.patch_socket(dns=False) + monkey.patch_thread() + print "Stem Port Plugin: modules are patched." +else: + print "Stem Port Plugin: Tor mode disabled. Module patching skipped." + + +class PatchedControlPort(ControlPort): + def _make_socket(self): + try: + if "socket_noproxy" in dir(socket): # Socket proxy-patched, use non-proxy one + control_socket = socket.socket_noproxy(socket.AF_INET, socket.SOCK_STREAM) + else: + control_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + + # TODO: repeated code - consider making a separate method + + control_socket.connect((self._control_addr, self._control_port)) + return control_socket + except socket.error as exc: + raise stem.SocketError(exc) + +def from_port(address = '127.0.0.1', port = 'default'): + import stem.connection + + if not stem.util.connection.is_valid_ipv4_address(address): + raise ValueError('Invalid IP address: %s' % address) + elif port != 'default' and not stem.util.connection.is_valid_port(port): + raise ValueError('Invalid port: %s' % port) + + if port == 'default': + raise ValueError('Must specify a port') + else: + control_port = PatchedControlPort(address, port) + + return Controller(control_port) + + +@PluginManager.registerTo("TorManager") +class TorManagerPlugin(object): + + def connectController(self): + self.log.info("Authenticate using Stem... 
%s:%s" % (self.ip, self.port)) + + try: + with self.lock: + if config.tor_password: + controller = from_port(port=self.port, password=config.tor_password) + else: + controller = from_port(port=self.port) + controller.authenticate() + self.controller = controller + self.status = u"Connected (via Stem)" + except Exception, err: + print("\n") + traceback.print_exc() + print("\n") + + self.controller = None + self.status = u"Error (%s)" % err + self.log.error("Tor stem connect error: %s" % Debug.formatException(err)) + + return self.controller + + + def disconnect(self): + self.controller.close() + self.controller = None + + + def resetCircuits(self): + try: + self.controller.signal(Signal.NEWNYM) + except Exception, err: + self.status = u"Stem reset circuits error (%s)" % err + self.log.error("Stem reset circuits error: %s" % err) + + + def makeOnionAndKey(self): + try: + service = self.controller.create_ephemeral_hidden_service( + {self.fileserver_port: self.fileserver_port}, + await_publication = False + ) + if service.private_key_type != "RSA1024": + raise Exception("ZeroNet doesn't support crypto " + service.private_key_type) + + self.log.debug("Stem created %s.onion (async descriptor publication)" % service.service_id) + + return (service.service_id, service.private_key) + + except Exception, err: + self.status = u"AddOnion error (Stem: %s)" % err + self.log.error("Failed to create hidden service with Stem: " + err) + return False + + + def delOnion(self, address): + try: + self.controller.remove_ephemeral_hidden_service(address) + return True + except Exception, err: + self.status = u"DelOnion error (Stem: %s)" % err + self.log.error("Stem failed to delete %s.onion: %s" % (address, err)) + self.disconnect() # Why? 
+ return False + + + def request(self, cmd): + with self.lock: + if not self.enabled: + return False + else: + self.log.error("[WARNING] StemPort self.request should not be called") + return "" + + def send(self, cmd, conn=None): + self.log.error("[WARNING] StemPort self.send should not be called") + return "" diff --git a/plugins/disabled-StemPort/__init__.py b/plugins/disabled-StemPort/__init__.py new file mode 100644 index 000000000..71150ad64 --- /dev/null +++ b/plugins/disabled-StemPort/__init__.py @@ -0,0 +1,10 @@ +try: + from stem.control import Controller + stem_found = True +except Exception as err: + print "STEM NOT FOUND! %s" % err + stem_found = False + +if stem_found: + print "Starting Stem plugin..." + import StemPortPlugin diff --git a/plugins/disabled-UiPassword/UiPasswordPlugin.py b/plugins/disabled-UiPassword/UiPasswordPlugin.py index a0e42e817..6e746fd46 100644 --- a/plugins/disabled-UiPassword/UiPasswordPlugin.py +++ b/plugins/disabled-UiPassword/UiPasswordPlugin.py @@ -11,12 +11,23 @@ sessions = {} +def showPasswordAdvice(password): + error_msgs = [] + if not password or not isinstance(password, (str, unicode)): + error_msgs.append("You have enabled UiPassword plugin, but you forgot to set a password!") + elif len(password) < 8: + error_msgs.append("You are using a very short UI password!") + return error_msgs + @PluginManager.registerTo("UiRequest") class UiRequestPlugin(object): sessions = sessions last_cleanup = time.time() def route(self, path): + # Restict Ui access by ip + if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict: + return self.error403(details=False) if path.endswith("favicon.ico"): return self.actionFile("src/Ui/media/img/favicon.ico") else: @@ -45,7 +56,7 @@ def actionLogin(self): # Redirect to homepage or referer url = self.env.get("HTTP_REFERER", "") - if not url or re.sub("\?.*", "", url).endswith("/Login"): + if not url or re.sub(r"\?.*", "", url).endswith("/Login"): url = "/" + config.homepage 
cookie_header = ('Set-Cookie', "session_id=%s;path=/;max-age=2592000;" % session_id) # Max age = 30 days self.start_response('301 Redirect', [('Location', url), cookie_header]) @@ -57,13 +68,10 @@ def actionLogin(self): yield template def checkPassword(self, password): - if password == config.ui_password: - return True - else: - return False + return password == config.ui_password - def randomString(self, chars): - return ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(chars)) + def randomString(self, nchars): + return ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(nchars)) @classmethod def cleanup(cls): @@ -97,6 +105,7 @@ def actionLogout(self): yield "Error: Invalid session id" + @PluginManager.registerTo("ConfigPlugin") class ConfigPlugin(object): def createArguments(self): @@ -106,6 +115,7 @@ def createArguments(self): return super(ConfigPlugin, self).createArguments() +from Translate import translate as lang @PluginManager.registerTo("UiWebsocket") class UiWebsocketPlugin(object): def actionUiLogout(self, to): @@ -114,5 +124,11 @@ def actionUiLogout(self, to): return self.response(to, "You don't have permission to run this command") session_id = self.request.getCookies().get("session_id", "") - message = "" % session_id - self.cmd("notification", ["done", message]) + self.cmd("redirect", '/Logout?session_id=%s' % session_id) + + def addHomepageNotifications(self): + error_msgs = showPasswordAdvice(config.ui_password) + for msg in error_msgs: + self.site.notifications.append(["error", lang[msg]]) + + return super(UiWebsocketPlugin, self).addHomepageNotifications() diff --git a/plugins/disabled-UiPassword/login.html b/plugins/disabled-UiPassword/login.html index ed16edbdb..12d0889db 100644 --- a/plugins/disabled-UiPassword/login.html +++ b/plugins/disabled-UiPassword/login.html @@ -74,7 +74,7 @@ - \ No newline at end of file + diff --git 
a/plugins/disabled-Zeroname-local/SiteManagerPlugin.py b/plugins/disabled-Zeroname-local/SiteManagerPlugin.py index a25c7da82..e8fc8610c 100644 --- a/plugins/disabled-Zeroname-local/SiteManagerPlugin.py +++ b/plugins/disabled-Zeroname-local/SiteManagerPlugin.py @@ -25,7 +25,7 @@ def isAddress(self, address): # Return: True if the address is domain def isDomain(self, address): - return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address) + return re.match(r"(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address) # Resolve domain diff --git a/plugins/disabled-Zeroname-local/UiRequestPlugin.py b/plugins/disabled-Zeroname-local/UiRequestPlugin.py index 462e485f3..df37e0667 100644 --- a/plugins/disabled-Zeroname-local/UiRequestPlugin.py +++ b/plugins/disabled-Zeroname-local/UiRequestPlugin.py @@ -11,7 +11,7 @@ def __init__(self, *args, **kwargs): # Media request def actionSiteMedia(self, path): - match = re.match("/media/(?P
    [A-Za-z0-9-]+\.[A-Za-z0-9\.-]+)(?P/.*|$)", path) + match = re.match(r"/media/(?P
    [A-Za-z0-9-]+\.[A-Za-z0-9\.-]+)(?P/.*|$)", path) if match: # Its a valid domain, resolve first domain = match.group("address") address = self.site_manager.resolveDomain(domain) @@ -23,13 +23,13 @@ def actionSiteMedia(self, path): # Is mediarequest allowed from that referer def isMediaRequestAllowed(self, site_address, referer): referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address - referer_path = re.sub("\?.*", "", referer_path) # Remove http params + referer_path = re.sub(r"\?.*", "", referer_path) # Remove http params if self.isProxyRequest(): # Match to site domain referer = re.sub("^http://zero[/]+", "http://", referer) # Allow /zero access referer_site_address = re.match("http[s]{0,1}://(.*?)(/|$)", referer).group(1) else: # Match to request path - referer_site_address = re.match("/(?P
    [A-Za-z0-9\.-]+)(?P/.*|$)", referer_path).group("address") + referer_site_address = re.match(r"/(?P
    [A-Za-z0-9\.-]+)(?P/.*|$)", referer_path).group("address") if referer_site_address == site_address: # Referer site address as simple address return True diff --git a/requirements.txt b/requirements.txt index eef988d04..e5cfb71ea 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,2 @@ gevent>=1.1.0 -msgpack-python>=0.4.4 +msgpack>=0.4.4 diff --git a/src/Config.py b/src/Config.py index cddffa6da..aab299fd7 100644 --- a/src/Config.py +++ b/src/Config.py @@ -1,17 +1,32 @@ import argparse import sys import os +import locale +import re import ConfigParser +import logging +import logging.handlers +import stat class Config(object): def __init__(self, argv): - self.version = "0.5.0" - self.rev = 1700 + self.version = "0.6.5" + self.rev = 3870 self.argv = argv self.action = None + self.pending_changes = {} + self.need_restart = False + self.keys_api_change_allowed = set([ + "tor", "fileserver_port", "language", "tor_use_bridges", "trackers_proxy", "trackers", + "trackers_file", "open_browser", "log_level", "fileserver_ip_type", "ip_external" + ]) + self.keys_restart_need = set(["tor", "fileserver_port", "fileserver_ip_type"]) + self.start_dir = self.getStartDir() + self.config_file = "zeronet.conf" + self.trackers_file = False self.createParser() self.createArguments() @@ -28,17 +43,41 @@ def __str__(self): def strToBool(self, v): return v.lower() in ("yes", "true", "t", "1") + def getStartDir(self): + this_file = os.path.abspath(__file__).replace("\\", "/").rstrip("cd") + + if this_file.endswith("/Contents/Resources/core/src/Config.py"): + # Running as ZeroNet.app + if this_file.startswith("/Application") or this_file.startswith("/private") or this_file.startswith(os.path.expanduser("~/Library")): + # Runnig from non-writeable directory, put data to Application Support + start_dir = os.path.expanduser("~/Library/Application Support/ZeroNet").decode(sys.getfilesystemencoding()) + else: + # Running from writeable directory put data next to .app + start_dir = 
re.sub("/[^/]+/Contents/Resources/core/src/Config.py", "", this_file).decode(sys.getfilesystemencoding()) + elif this_file.endswith("/core/src/Config.py"): + # Running as exe or source is at Application Support directory, put var files to outside of core dir + start_dir = this_file.replace("/core/src/Config.py", "").decode(sys.getfilesystemencoding()) + elif this_file.endswith("usr/share/zeronet/src/Config.py"): + # Running from non-writeable location, e.g., AppImage + start_dir = os.path.expanduser("~/ZeroNet").decode(sys.getfilesystemencoding()) + else: + start_dir = "." + + return start_dir + # Create command line arguments def createArguments(self): trackers = [ "zero://boot3rdez4rzn36x.onion:15441", - "zero://boot.zeronet.io#f36ca555bee6ba216b14d10f38c16f7769ff064e0e37d887603548cc2e64191d:15441", - "udp://tracker.coppersurfer.tk:6969", - "udp://tracker.leechers-paradise.org:6969", - "udp://9.rarbg.com:2710", - "http://tracker.tordb.ml:6881/announce", - "http://explodie.org:6969/announce", - "http://tracker1.wasabii.com.tw:6969/announce" + "zero://zero.booth.moe#f36ca555bee6ba216b14d10f38c16f7769ff064e0e37d887603548cc2e64191d:443", # US/NY + "udp://tracker.coppersurfer.tk:6969", # DE + "udp://tracker.port443.xyz:6969", # UK + "udp://104.238.198.186:8000", # US/LA + "http://tracker2.itzmx.com:6961/announce", # US/LA + "http://open.acgnxtracker.com:80/announce", # DE + "http://open.trackerlist.xyz:80/announce", # Cloudflare + "https://1.tracker.eu.org:443/announce", # Google App Engine + "zero://2602:ffc5::c5b2:5360:26312" # US/ATL ] # Platform specific if sys.platform.startswith("win"): @@ -46,8 +85,27 @@ def createArguments(self): else: coffeescript = None + try: + language, enc = locale.getdefaultlocale() + language = language.lower().replace("_", "-") + if language not in ["pt-br", "zh-tw"]: + language = language.split("-")[0] + except Exception: + language = "en" + use_openssl = True + if repr(1483108852.565) != "1483108852.565": # Fix for weird Android 
issue + fix_float_decimals = True + else: + fix_float_decimals = False + + config_file = self.start_dir + "/zeronet.conf" + data_dir = self.start_dir + "/data" + log_dir = self.start_dir + "/log" + + ip_local = ["127.0.0.1", "::1"] + # Main action = self.subparsers.add_parser("main", help='Start UiServer and FileServer (default)') @@ -69,6 +127,7 @@ def createArguments(self): action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?') action.add_argument('--inner_path', help='File you want to sign (default: content.json)', default="content.json", metavar="inner_path") + action.add_argument('--remove_missing_optional', help='Remove optional files that is not present in the directory', action='store_true') action.add_argument('--publish', help='Publish site after the signing', action='store_true') # SitePublish @@ -85,6 +144,12 @@ def createArguments(self): action = self.subparsers.add_parser("siteVerify", help='Verify site files using sha512: address') action.add_argument('address', help='Site to verify') + # SiteCmd + action = self.subparsers.add_parser("siteCmd", help='Execute a ZeroFrame API command on a site') + action.add_argument('address', help='Site address') + action.add_argument('cmd', help='API command name') + action.add_argument('parameters', help='Parameters of the command', nargs='?') + # dbRebuild action = self.subparsers.add_parser("dbRebuild", help='Rebuild site database cache') action.add_argument('address', help='Site to rebuild') @@ -119,50 +184,90 @@ def createArguments(self): action.add_argument('message', help='Message to sign') action.add_argument('privatekey', help='Private key') + # Crypt Verify + action = self.subparsers.add_parser("cryptVerify", help='Verify message using Bitcoin public address') + action.add_argument('message', help='Message to verify') + action.add_argument('sign', help='Signiture for message') + action.add_argument('address', help='Signer\'s address') + + # Crypt GetPrivatekey + action = 
self.subparsers.add_parser("cryptGetPrivatekey", help='Generate a privatekey from master seed') + action.add_argument('master_seed', help='Source master seed') + action.add_argument('site_address_index', help='Site address index', type=int) + + action = self.subparsers.add_parser("getConfig", help='Return json-encoded info') + action = self.subparsers.add_parser("testConnection", help='Testing') + action = self.subparsers.add_parser("testAnnounce", help='Testing') + # Config parameters self.parser.add_argument('--verbose', help='More detailed logging', action='store_true') self.parser.add_argument('--debug', help='Debug mode', action='store_true') + self.parser.add_argument('--silent', help='Disable logging to terminal output', action='store_true') self.parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true') - self.parser.add_argument('--debug_gevent', help='Debug gevent functions', action='store_true') self.parser.add_argument('--batch', help="Batch mode (No interactive input for commands)", action='store_true') - self.parser.add_argument('--config_file', help='Path of config file', default="zeronet.conf", metavar="path") - self.parser.add_argument('--data_dir', help='Path of data directory', default="data", metavar="path") - self.parser.add_argument('--log_dir', help='Path of logging directory', default="log", metavar="path") + self.parser.add_argument('--config_file', help='Path of config file', default=config_file, metavar="path") + self.parser.add_argument('--data_dir', help='Path of data directory', default=data_dir, metavar="path") + + self.parser.add_argument('--log_dir', help='Path of logging directory', default=log_dir, metavar="path") + self.parser.add_argument('--log_level', help='Level of logging to file', default="DEBUG", choices=["DEBUG", "INFO", "ERROR"]) + self.parser.add_argument('--log_rotate', help='Log rotate interval', default="daily", choices=["hourly", "daily", "weekly", "off"]) + 
self.parser.add_argument('--log_rotate_backup_count', help='Log rotate backup count', default=5, type=int) + self.parser.add_argument('--language', help='Web interface language', default=language, metavar='language') self.parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip') self.parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port') self.parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*') + self.parser.add_argument('--ui_host', help='Allow access using this hosts', metavar='host', nargs='*') + self.parser.add_argument('--ui_trans_proxy', help='Allow access using a transparent proxy', action='store_true') + self.parser.add_argument('--open_browser', help='Open homepage in web browser automatically', nargs='?', const="default_browser", metavar='browser_name') self.parser.add_argument('--homepage', help='Web interface Homepage', default='1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D', metavar='address') - self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, type=int, metavar='size') - self.parser.add_argument('--connected_limit', help='Max connected peer per site', default=10, type=int, metavar='connected_limit') + self.parser.add_argument('--updatesite', help='Source code update site', default='1UPDatEDxnvHDo7TXvq6AEBARfNkyfxsp', + metavar='address') + self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, type=int, metavar='limit') + self.parser.add_argument('--file_size_limit', help='Maximum per file size limit in MB', default=10, type=int, metavar='limit') + self.parser.add_argument('--connected_limit', help='Max connected peer per site', default=8, type=int, metavar='connected_limit') + self.parser.add_argument('--global_connected_limit', help='Max connections', default=512, type=int, metavar='global_connected_limit') + 
self.parser.add_argument('--workers', help='Download workers per site', default=5, type=int, metavar='workers') self.parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip') - self.parser.add_argument('--fileserver_port', help='FileServer bind port', default=15441, type=int, metavar='port') + self.parser.add_argument('--fileserver_port', help='FileServer bind port (0: randomize)', default=0, type=int, metavar='port') + self.parser.add_argument('--fileserver_port_range', help='FileServer randomization range', default="10000-40000", metavar='port') + self.parser.add_argument('--fileserver_ip_type', help='FileServer ip type', default="dual", choices=["ipv4", "ipv6", "dual"]) + self.parser.add_argument('--ip_local', help='My local ips', default=ip_local, type=int, metavar='ip', nargs='*') + self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip', nargs='*') + self.parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true') self.parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port') - self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip') + self.parser.add_argument('--bind', help='Bind outgoing sockets to this address', metavar='ip') self.parser.add_argument('--trackers', help='Bootstraping torrent trackers', default=trackers, metavar='protocol://address', nargs='*') self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', default=False, metavar='path') + self.parser.add_argument('--trackers_proxy', help='Force use proxy to connect to trackers (disable, tor, ip:port)', default="disable") self.parser.add_argument('--use_openssl', help='Use OpenSSL liblary for speedup', type='bool', choices=[True, False], default=use_openssl) self.parser.add_argument('--disable_db', help='Disable database updating', action='store_true') 
self.parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true') + self.parser.add_argument('--force_encryption', help="Enforce encryption to all peer connections", action='store_true') self.parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory', type='bool', choices=[True, False], default=True) self.parser.add_argument('--keep_ssl_cert', help='Disable new SSL cert generation on startup', action='store_true') self.parser.add_argument('--max_files_opened', help='Change maximum opened files allowed by OS to this value on startup', default=2048, type=int, metavar='limit') + self.parser.add_argument('--stack_size', help='Change thread stack size', default=None, type=int, metavar='thread_stack_size') self.parser.add_argument('--use_tempfiles', help='Use temporary files when downloading (experimental)', type='bool', choices=[True, False], default=False) self.parser.add_argument('--stream_downloads', help='Stream download directly to files (experimental)', type='bool', choices=[True, False], default=False) self.parser.add_argument("--msgpack_purepython", help='Use less memory, but a bit more CPU power', - type='bool', choices=[True, False], default=True) + type='bool', choices=[True, False], default=False) + self.parser.add_argument("--fix_float_decimals", help='Fix content.json modification date float precision on verification', + type='bool', choices=[True, False], default=fix_float_decimals) + self.parser.add_argument("--db_mode", choices=["speed", "security"], default="speed") + self.parser.add_argument("--download_optional", choices=["manual", "auto"], default="manual") self.parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript, metavar='executable_path') @@ -170,16 +275,36 @@ def createArguments(self): self.parser.add_argument('--tor', help='enable: Use only for Tor peers, always: Use Tor for every connection', 
choices=["disable", "enable", "always"], default='enable') self.parser.add_argument('--tor_controller', help='Tor controller address', metavar='ip:port', default='127.0.0.1:9051') self.parser.add_argument('--tor_proxy', help='Tor proxy address', metavar='ip:port', default='127.0.0.1:9050') + self.parser.add_argument('--tor_password', help='Tor controller password', metavar='password') + self.parser.add_argument('--tor_use_bridges', help='Use obfuscated bridge relays to avoid Tor block', action='store_true') + self.parser.add_argument('--tor_hs_limit', help='Maximum number of hidden services in Tor always mode', metavar='limit', type=int, default=10) + self.parser.add_argument('--tor_hs_port', help='Hidden service port in Tor always mode', metavar='limit', type=int, default=15441) self.parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev)) + self.parser.add_argument('--end', help='Stop multi value argument parsing', action='store_true') return self.parser def loadTrackersFile(self): - self.trackers = [] - for tracker in open(self.trackers_file): - if "://" in tracker: - self.trackers.append(tracker.strip()) + if not self.trackers_file: + return None + + self.trackers = self.arguments.trackers[:] + + try: + if self.trackers_file.startswith("/"): # Absolute + trackers_file_path = self.trackers_file + elif self.trackers_file.startswith("{data_dir}"): # Relative to data_dir + trackers_file_path = self.trackers_file.replace("{data_dir}", self.data_dir) + else: # Relative to zeronet.py + trackers_file_path = self.start_dir + "/" + self.trackers_file + + for line in open(trackers_file_path): + tracker = line.strip() + if "://" in tracker and tracker not in self.trackers: + self.trackers.append(tracker) + except Exception as err: + print "Error loading trackers file: %s" % err # Find arguments specified for current action def getActionArguments(self): @@ -234,22 +359,32 @@ def silencer(parser, function_name): self.parser.exit 
= lambda *args, **kwargs: silencer(self.parser, "exit") argv = self.argv[:] # Copy command line arguments + self.parseCommandline(argv, silent) # Parse argv + self.setAttributes() if parse_config: argv = self.parseConfig(argv) # Add arguments from config file + self.parseCommandline(argv, silent) # Parse argv self.setAttributes() + if not silent: + if self.fileserver_ip != "*" and self.fileserver_ip not in self.ip_local: + self.ip_local.append(self.fileserver_ip) + if silent: # Restore original functions if self.parser.exited and self.action == "main": # Argument parsing halted, don't start ZeroNet with main action self.action = None self.parser._print_message = original_print_message self.parser.exit = original_exit + self.loadTrackersFile() + # Parse command line arguments def parseCommandline(self, argv, silent=False): # Find out if action is specificed on start action = self.getAction(argv) if not action: + argv.append("--end") argv.append("main") action = "main" argv = self.moveUnknownToEnd(argv, action) @@ -273,12 +408,24 @@ def parseConfig(self, argv): config.read(self.config_file) for section in config.sections(): for key, val in config.items(section): + if val == "True": + val = None if section != "global": # If not global prefix key with section key = section + "_" + key + + if key == "open_browser": # Prefer config file value over cli argument + if "--%s" % key in argv: + pos = argv.index("--open_browser") + del argv[pos:pos + 2] + + argv_extend = ["--%s" % key] if val: for line in val.strip().split("\n"): # Allow multi-line values - argv.insert(1, line) - argv.insert(1, "--%s" % key) + argv_extend.append(line) + if "\n" in val: + argv_extend.append("--end") + + argv = argv[:1] + argv_extend + argv[1:] return argv # Expose arguments as class attributes @@ -287,6 +434,10 @@ def setAttributes(self): if self.arguments: args = vars(self.arguments) for key, val in args.items(): + if type(val) is list: + val = val[:] + if key in ("data_dir", "log_dir"): + val 
= val.replace("\\", "/") setattr(self, key, val) def loadPlugins(self): @@ -316,15 +467,27 @@ def saveValue(self, key, value): for line in lines: if line.strip() == "[global]": global_line_i = i - if line.startswith(key + " = "): + if line.startswith(key + " ="): key_line_i = i i += 1 + if key_line_i and len(lines) > key_line_i + 1: + while True: # Delete previous multiline values + is_value_line = lines[key_line_i + 1].startswith(" ") or lines[key_line_i + 1].startswith("\t") + if not is_value_line: + break + del lines[key_line_i + 1] + if value is None: # Delete line if key_line_i: del lines[key_line_i] + else: # Add / update - new_line = "%s = %s" % (key, str(value).replace("\n", "").replace("\r", "")) + if type(value) is list: + value_lines = [""] + [str(line).replace("\n", "").replace("\r", "") for line in value] + else: + value_lines = [str(value).replace("\n", "").replace("\r", "")] + new_line = "%s = %s" % (key, "\n ".join(value_lines)) if key_line_i: # Already in the config, change the line lines[key_line_i] = new_line elif global_line_i is None: # No global section yet, append to end of file @@ -335,4 +498,87 @@ def saveValue(self, key, value): open(self.config_file, "w").write("\n".join(lines)) + def getServerInfo(self): + from Plugin import PluginManager + + info = { + "platform": sys.platform, + "fileserver_ip": self.fileserver_ip, + "fileserver_port": self.fileserver_port, + "ui_ip": self.ui_ip, + "ui_port": self.ui_port, + "version": self.version, + "rev": self.rev, + "language": self.language, + "debug": self.debug, + "plugins": PluginManager.plugin_manager.plugin_names, + + "log_dir": os.path.abspath(self.log_dir), + "data_dir": os.path.abspath(self.data_dir), + "src_dir": os.path.dirname(os.path.abspath(__file__)) + } + + try: + info["ip_external"] = sys.modules["main"].file_server.port_opened + info["tor_enabled"] = sys.modules["main"].file_server.tor_manager.enabled + info["tor_status"] = sys.modules["main"].file_server.tor_manager.status + 
except: + pass + + return info + + def initConsoleLogger(self): + if self.action == "main": + format = '[%(asctime)s] %(name)s %(message)s' + else: + format = '%(name)s %(message)s' + + if self.silent: + level = logging.ERROR + elif self.debug: + level = logging.DEBUG + else: + level = logging.INFO + + console_logger = logging.StreamHandler() + console_logger.setFormatter(logging.Formatter(format, "%H:%M:%S")) + console_logger.setLevel(level) + logging.getLogger('').addHandler(console_logger) + + def initFileLogger(self): + if self.action == "main": + log_file_path = "%s/debug.log" % self.log_dir + else: + log_file_path = "%s/cmd.log" % self.log_dir + if self.log_rotate == "off": + file_logger = logging.FileHandler(log_file_path) + else: + when_names = {"weekly": "w", "daily": "d", "hourly": "h"} + file_logger = logging.handlers.TimedRotatingFileHandler( + log_file_path, when=when_names[self.log_rotate], interval=1, backupCount=self.log_rotate_backup_count + ) + file_logger.doRollover() # Always start with empty log file + file_logger.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)-8s %(name)s %(message)s')) + file_logger.setLevel(logging.getLevelName(self.log_level)) + logging.getLogger('').setLevel(logging.getLevelName(self.log_level)) + logging.getLogger('').addHandler(file_logger) + + def initLogging(self): + # Create necessary files and dirs + if not os.path.isdir(self.log_dir): + os.mkdir(self.log_dir) + try: + os.chmod(self.log_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) + except Exception as err: + print "Can't change permission of %s: %s" % (self.log_dir, err) + + # Make warning hidden from console + logging.WARNING = 15 # Don't display warnings if not in debug mode + logging.addLevelName(15, "WARNING") + + logging.getLogger('').name = "-" # Remove root prefix + + self.initConsoleLogger() + self.initFileLogger() + config = Config(sys.argv) diff --git a/src/Connection/Connection.py b/src/Connection/Connection.py index 5fe06a978..4edd33a29 
100644 --- a/src/Connection/Connection.py +++ b/src/Connection/Connection.py @@ -1,35 +1,49 @@ import socket import time +import random import gevent import msgpack +import msgpack.fallback +try: + from gevent.coros import RLock +except: + from gevent.lock import RLock from Config import config from Debug import Debug from util import StreamingMsgpack from Crypt import CryptConnection +from util import helper class Connection(object): __slots__ = ( - "sock", "sock_wrapped", "ip", "port", "cert_pin", "site_lock", "id", "protocol", "type", "server", "unpacker", "req_id", - "handshake", "crypt", "connected", "event_connected", "closed", "start_time", "last_recv_time", - "last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent", "cpu_time", - "last_ping_delay", "last_req_time", "last_cmd", "bad_actions", "sites", "name", "updateName", "waiting_requests", "waiting_streams" + "sock", "sock_wrapped", "ip", "port", "cert_pin", "target_onion", "id", "protocol", "type", "server", "unpacker", "req_id", "ip_type", + "handshake", "crypt", "connected", "event_connected", "closed", "start_time", "handshake_time", "last_recv_time", "is_private_ip", "is_tracker_connection", + "last_message_time", "last_send_time", "last_sent_time", "incomplete_buff_recv", "bytes_recv", "bytes_sent", "cpu_time", "send_lock", + "last_ping_delay", "last_req_time", "last_cmd_sent", "last_cmd_recv", "bad_actions", "sites", "name", "updateName", "waiting_requests", "waiting_streams" ) - def __init__(self, server, ip, port, sock=None, site_lock=None): + def __init__(self, server, ip, port, sock=None, target_onion=None, is_tracker_connection=False): self.sock = sock - self.ip = ip - self.port = port self.cert_pin = None if "#" in ip: - self.ip, self.cert_pin = ip.split("#") - self.site_lock = site_lock # Only this site requests allowed (for Tor) + ip, self.cert_pin = ip.split("#") + self.target_onion = target_onion # Requested onion adress self.id = 
server.last_connection_id server.last_connection_id += 1 self.protocol = "?" self.type = "?" + self.ip_type = "?" + self.port = int(port) + self.setIp(ip) + + if helper.isPrivateIp(self.ip) and self.ip not in config.ip_local: + self.is_private_ip = True + else: + self.is_private_ip = False + self.is_tracker_connection = is_tracker_connection self.server = server self.unpacker = None # Stream incoming socket messages here @@ -44,6 +58,7 @@ def __init__(self, server, ip, port, sock=None, site_lock=None): # Stats self.start_time = time.time() + self.handshake_time = 0 self.last_recv_time = 0 self.last_message_time = 0 self.last_send_time = 0 @@ -53,10 +68,12 @@ def __init__(self, server, ip, port, sock=None, site_lock=None): self.bytes_sent = 0 self.last_ping_delay = None self.last_req_time = 0 - self.last_cmd = None + self.last_cmd_sent = None + self.last_cmd_recv = None self.bad_actions = 0 self.sites = 0 self.cpu_time = 0.0 + self.send_lock = RLock() self.name = None self.updateName() @@ -64,6 +81,18 @@ def __init__(self, server, ip, port, sock=None, site_lock=None): self.waiting_requests = {} # Waiting sent requests self.waiting_streams = {} # Waiting response file streams + def setIp(self, ip): + self.ip = ip + self.ip_type = helper.getIpType(ip) + self.updateName() + + def createSocket(self): + if helper.getIpType(self.ip) == "ipv6" and not hasattr(socket, "socket_noproxy"): + # Create IPv6 connection as IPv4 when using proxy + return socket.socket(socket.AF_INET6, socket.SOCK_STREAM) + else: + return socket.socket(socket.AF_INET, socket.SOCK_STREAM) + def updateName(self): self.name = "Conn#%2s %-12s [%s]" % (self.id, self.ip, self.protocol) @@ -74,44 +103,92 @@ def __repr__(self): return "<%s>" % self.__str__() def log(self, text): - self.server.log.debug("%s > %s" % (self.name, text)) + self.server.log.debug("%s > %s" % (self.name, text.decode("utf8", "ignore"))) + + def getValidSites(self): + return [key for key, val in 
self.server.tor_manager.site_onions.items() if val == self.target_onion] def badAction(self, weight=1): self.bad_actions += weight + if self.bad_actions > 40: + self.close("Too many bad actions") + elif self.bad_actions > 20: + time.sleep(5) def goodAction(self): self.bad_actions = 0 # Open connection to peer and wait for handshake def connect(self): - self.log("Connecting...") self.type = "out" - if self.ip.endswith(".onion"): + if self.ip_type == "onion": if not self.server.tor_manager or not self.server.tor_manager.enabled: raise Exception("Can't connect to onion addresses, no Tor controller present") self.sock = self.server.tor_manager.createSocket(self.ip, self.port) + elif config.tor == "always" and helper.isPrivateIp(self.ip) and self.ip not in config.ip_local: + raise Exception("Can't connect to local IPs in Tor: always mode") + elif config.trackers_proxy != "disable" and self.is_tracker_connection: + if config.trackers_proxy == "tor": + self.sock = self.server.tor_manager.createSocket(self.ip, self.port) + else: + from lib.PySocks import socks + self.sock = socks.socksocket() + proxy_ip, proxy_port = config.trackers_proxy.split(":") + self.sock.set_proxy(socks.PROXY_TYPE_SOCKS5, proxy_ip, int(proxy_port)) else: - self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.sock.connect((self.ip, int(self.port))) + self.sock = self.createSocket() + + if "TCP_NODELAY" in dir(socket): + self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + + timeout_before = self.sock.gettimeout() + self.sock.settimeout(30) + if self.ip_type == "ipv6" and not hasattr(self.sock, "proxy"): + sock_address = (self.ip, self.port, 1, 1) + else: + sock_address = (self.ip, self.port) + + self.sock.connect(sock_address) # Implicit SSL + should_encrypt = not self.ip_type == "onion" and self.ip not in self.server.broken_ssl_ips and self.ip not in config.ip_local if self.cert_pin: self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa", 
cert_pin=self.cert_pin) self.sock.do_handshake() self.crypt = "tls-rsa" self.sock_wrapped = True + elif should_encrypt and "tls-rsa" in CryptConnection.manager.crypt_supported: + try: + self.sock = CryptConnection.manager.wrapSocket(self.sock, "tls-rsa") + self.sock.do_handshake() + self.crypt = "tls-rsa" + self.sock_wrapped = True + except Exception, err: + if not config.force_encryption: + self.log("Crypt connection error: %s, adding ip %s as broken ssl." % (err, self.ip)) + self.server.broken_ssl_ips[self.ip] = True + self.sock.close() + self.sock = self.createSocket() + self.sock.settimeout(30) + self.sock.connect(sock_address) # Detect protocol - self.send({"cmd": "handshake", "req_id": 0, "params": self.getHandshakeInfo()}) + self.send({"cmd": "handshake", "req_id": 0, "params": self.getHandshakeInfo(), "random": "A" * random.randint(0, 1024)}) event_connected = self.event_connected gevent.spawn(self.messageLoop) - return event_connected.get() # Wait for handshake + connect_res = event_connected.get() # Wait for handshake + self.sock.settimeout(timeout_before) + return connect_res # Handle incoming connection def handleIncomingConnection(self, sock): self.log("Incoming connection...") + + if "TCP_NODELAY" in dir(socket): + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + self.type = "in" - if self.ip != "127.0.0.1": # Clearnet: Check implicit SSL + if self.ip not in config.ip_local: # Clearnet: Check implicit SSL try: if sock.recv(1, gevent.socket.MSG_PEEK) == "\x16": self.log("Crypt in connection using implicit SSL") @@ -131,11 +208,12 @@ def messageLoop(self): self.updateName() self.connected = True buff_len = 0 + req_len = 0 + unpacker_bytes = 0 - self.unpacker = msgpack.Unpacker() try: while not self.closed: - buff = self.sock.recv(16 * 1024) + buff = self.sock.recv(64 * 1024) if not buff: break # Connection closed buff_len = len(buff) @@ -145,77 +223,175 @@ def messageLoop(self): self.incomplete_buff_recv += 1 self.bytes_recv += buff_len 
self.server.bytes_recv += buff_len + req_len += buff_len if not self.unpacker: - self.unpacker = msgpack.Unpacker() + self.unpacker = msgpack.fallback.Unpacker() + unpacker_bytes = 0 + self.unpacker.feed(buff) - buff = None - for message in self.unpacker: + unpacker_bytes += buff_len + + while True: + try: + message = self.unpacker.next() + except StopIteration: + break + if not type(message) is dict: + if config.debug_socket: + self.log("Invalid message type: %s, content: %r, buffer: %r" % (type(message), message, buff[0:16])) + raise Exception("Invalid message type: %s" % type(message)) + + # Stats self.incomplete_buff_recv = 0 + stat_key = message.get("cmd", "unknown") + if stat_key == "response" and "to" in message: + cmd_sent = self.waiting_requests.get(message["to"], {"cmd": "unknown"})["cmd"] + stat_key = "response: %s" % cmd_sent + if stat_key == "update": + stat_key = "update: %s" % message["params"]["site"] + self.server.stat_recv[stat_key]["bytes"] += req_len + self.server.stat_recv[stat_key]["num"] += 1 + if "stream_bytes" in message: + self.server.stat_recv[stat_key]["bytes"] += message["stream_bytes"] + req_len = 0 + + # Handle message if "stream_bytes" in message: - self.handleStream(message) + buff_left = self.handleStream(message, self.unpacker, buff, unpacker_bytes) + self.unpacker = msgpack.fallback.Unpacker() + self.unpacker.feed(buff_left) + unpacker_bytes = len(buff_left) + if config.debug_socket: + self.log("Start new unpacker with buff_left: %r" % buff_left) else: self.handleMessage(message) message = None - except Exception, err: + except Exception as err: if not self.closed: self.log("Socket error: %s" % Debug.formatException(err)) - self.close() # MessageLoop ended, close connection + self.server.stat_recv["error: %s" % err]["bytes"] += req_len + self.server.stat_recv["error: %s" % err]["num"] += 1 + self.close("MessageLoop ended (closed: %s)" % self.closed) # MessageLoop ended, close connection + + # Stream socket directly to a file + 
def handleStream(self, message, unpacker, buff, unpacker_bytes): + stream_bytes_left = message["stream_bytes"] + file = self.waiting_streams[message["to"]] + + if "tell" in dir(unpacker): + unpacker_unprocessed_bytes = unpacker_bytes - unpacker.tell() + else: + unpacker_unprocessed_bytes = unpacker._fb_buf_n - unpacker._fb_buf_o + + if unpacker_unprocessed_bytes: # Found stream bytes in unpacker + unpacker_stream_bytes = min(unpacker_unprocessed_bytes, stream_bytes_left) + buff_stream_start = len(buff) - unpacker_unprocessed_bytes + file.write(buff[buff_stream_start:buff_stream_start + unpacker_stream_bytes]) + stream_bytes_left -= unpacker_stream_bytes + else: + unpacker_stream_bytes = 0 + + if config.debug_socket: + self.log( + "Starting stream %s: %s bytes (%s from unpacker, buff size: %s, unprocessed: %s)" % + (message["to"], message["stream_bytes"], unpacker_stream_bytes, len(buff), unpacker_unprocessed_bytes) + ) + + try: + while 1: + if stream_bytes_left <= 0: + break + stream_buff = self.sock.recv(min(64 * 1024, stream_bytes_left)) + if not stream_buff: + break + buff_len = len(stream_buff) + stream_bytes_left -= buff_len + file.write(stream_buff) + + # Statistics + self.last_recv_time = time.time() + self.incomplete_buff_recv += 1 + self.bytes_recv += buff_len + self.server.bytes_recv += buff_len + except Exception, err: + self.log("Stream read error: %s" % Debug.formatException(err)) + + if config.debug_socket: + self.log("End stream %s, file pos: %s" % (message["to"], file.tell())) + + self.incomplete_buff_recv = 0 + self.waiting_requests[message["to"]]["evt"].set(message) # Set the response to event + del self.waiting_streams[message["to"]] + del self.waiting_requests[message["to"]] + + if unpacker_stream_bytes: + return buff[buff_stream_start + unpacker_stream_bytes:] + else: + return "" # My handshake info def getHandshakeInfo(self): # No TLS for onion connections - if self.ip.endswith(".onion"): + if self.ip_type == "onion": crypt_supported = [] 
else: crypt_supported = CryptConnection.manager.crypt_supported # No peer id for onion connections - if self.ip.endswith(".onion") or self.ip == "127.0.0.1": + if self.ip_type == "onion" or self.ip in config.ip_local: peer_id = "" else: peer_id = self.server.peer_id # Setup peer lock from requested onion address - if self.handshake and self.handshake.get("target_ip", "").endswith(".onion"): - target_onion = self.handshake.get("target_ip").replace(".onion", "") # My onion address - onion_sites = {v: k for k, v in self.server.tor_manager.site_onions.items()} # Inverse, Onion: Site address - self.site_lock = onion_sites.get(target_onion) - if not self.site_lock: - self.server.log.warning("Unknown target onion address: %s" % target_onion) - self.site_lock = "unknown" + if self.handshake and self.handshake.get("target_ip", "").endswith(".onion") and self.server.tor_manager.start_onions: + self.target_onion = self.handshake.get("target_ip").replace(".onion", "") # My onion address + if not self.server.tor_manager.site_onions.values(): + self.server.log.warning("Unknown target onion address: %s" % self.target_onion) handshake = { "version": config.version, "protocol": "v2", "peer_id": peer_id, "fileserver_port": self.server.port, - "port_opened": self.server.port_opened, + "port_opened": self.server.port_opened.get(self.ip_type, None), "target_ip": self.ip, "rev": config.rev, "crypt_supported": crypt_supported, - "crypt": self.crypt + "crypt": self.crypt, + "time": int(time.time()) } - if self.site_lock: - handshake["onion"] = self.server.tor_manager.getOnion(self.site_lock) - elif self.ip.endswith(".onion"): + if self.target_onion: + handshake["onion"] = self.target_onion + elif self.ip_type == "onion": handshake["onion"] = self.server.tor_manager.getOnion("global") + if self.is_tracker_connection: + handshake["tracker_connection"] = True + + if config.debug_socket: + self.log("My Handshake: %s" % handshake) + return handshake def setHandshake(self, handshake): + if 
config.debug_socket: + self.log("Remote Handshake: %s" % handshake) + + if handshake.get("peer_id") == self.server.peer_id and not handshake.get("tracker_connection") and not self.is_tracker_connection: + self.close("Same peer id, can't connect to myself") + self.server.peer_blacklist.append((handshake["target_ip"], handshake["fileserver_port"])) + return False + self.handshake = handshake - if handshake.get("port_opened", None) is False and "onion" not in handshake: # Not connectable + if handshake.get("port_opened", None) is False and "onion" not in handshake and not self.is_private_ip: # Not connectable self.port = 0 else: - self.port = handshake["fileserver_port"] # Set peer fileserver port - - if handshake.get("onion") and not self.ip.endswith(".onion"): # Set incoming connection's onion address - self.ip = handshake["onion"] + ".onion" - self.updateName() + self.port = int(handshake["fileserver_port"]) # Set peer fileserver port # Check if we can encrypt the connection - if handshake.get("crypt_supported") and handshake["peer_id"] not in self.server.broken_ssl_peer_ids: - if self.ip.endswith(".onion"): + if handshake.get("crypt_supported") and self.ip not in self.server.broken_ssl_ips: + if self.ip_type == "onion" or self.ip in config.ip_local: crypt = None elif handshake.get("crypt"): # Recommended crypt by server crypt = handshake["crypt"] @@ -224,18 +400,34 @@ def setHandshake(self, handshake): if crypt: self.crypt = crypt + + if self.type == "in" and handshake.get("onion") and not self.ip_type == "onion": # Set incoming connection's onion address + if self.server.ips.get(self.ip) == self: + del self.server.ips[self.ip] + self.setIp(handshake["onion"] + ".onion") + self.log("Changing ip to %s" % self.ip) + self.server.ips[self.ip] = self + self.updateName() + self.event_connected.set(True) # Mark handshake as done self.event_connected = None + self.handshake_time = time.time() # Handle incoming message def handleMessage(self, message): + try: + cmd = 
message["cmd"] + except TypeError, AttributeError: + cmd = None + self.last_message_time = time.time() - if message.get("cmd") == "response": # New style response + self.last_cmd_recv = cmd + if cmd == "response": # New style response if message["to"] in self.waiting_requests: - if self.last_send_time: + if self.last_send_time and len(self.waiting_requests) == 1: ping = time.time() - self.last_send_time self.last_ping_delay = ping - self.waiting_requests[message["to"]].set(message) # Set the response to event + self.waiting_requests[message["to"]]["evt"].set(message) # Set the response to event del self.waiting_requests[message["to"]] elif message["to"] == 0: # Other peers handshake ping = time.time() - self.start_time @@ -246,36 +438,33 @@ def handleMessage(self, message): if message.get("crypt") and not self.sock_wrapped: self.crypt = message["crypt"] server = (self.type == "in") - self.log("Crypt out connection using: %s (server side: %s)..." % (self.crypt, server)) + self.log("Crypt out connection using: %s (server side: %s, ping: %.3fs)..." 
% (self.crypt, server, ping)) self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server, cert_pin=self.cert_pin) self.sock.do_handshake() self.sock_wrapped = True if not self.sock_wrapped and self.cert_pin: - self.log("Crypt connection error: Socket not encrypted, but certificate pin present") - self.close() + self.close("Crypt connection error: Socket not encrypted, but certificate pin present") return self.setHandshake(message) else: self.log("Unknown response: %s" % message) - elif message.get("cmd"): # Handhsake request - if message["cmd"] == "handshake": + elif cmd: + self.server.num_recv += 1 + if cmd == "handshake": self.handleHandshake(message) else: self.server.handleRequest(self, message) else: # Old style response, no req_id defined - if config.debug_socket: - self.log("Unknown message: %s, waiting: %s" % (message, self.waiting_requests.keys())) + self.log("Unknown message, waiting: %s" % self.waiting_requests.keys()) if self.waiting_requests: last_req_id = min(self.waiting_requests.keys()) # Get the oldest waiting request and set it true - self.waiting_requests[last_req_id].set(message) + self.waiting_requests[last_req_id]["evt"].set(message) del self.waiting_requests[last_req_id] # Remove from waiting request # Incoming handshake set request def handleHandshake(self, message): - if config.debug_socket: - self.log("Handshake request: %s" % message) self.setHandshake(message["params"]) data = self.getHandshakeInfo() data["cmd"] = "response" @@ -289,115 +478,94 @@ def handleHandshake(self, message): self.sock = CryptConnection.manager.wrapSocket(self.sock, self.crypt, server, cert_pin=self.cert_pin) self.sock_wrapped = True except Exception, err: - self.log("Crypt connection error: %s, adding peerid %s as broken ssl." 
% (err, message["params"]["peer_id"])) - self.server.broken_ssl_peer_ids[message["params"]["peer_id"]] = True - self.close() + if not config.force_encryption: + self.log("Crypt connection error: %s, adding ip %s as broken ssl." % (err, self.ip)) + self.server.broken_ssl_ips[self.ip] = True + self.close("Broken ssl") if not self.sock_wrapped and self.cert_pin: - self.log("Crypt connection error: Socket not encrypted, but certificate pin present") - self.close() - - # Stream socket directly to a file - def handleStream(self, message): - - read_bytes = message["stream_bytes"] # Bytes left we have to read from socket - try: - buff = self.unpacker.read_bytes(min(16 * 1024, read_bytes)) # Check if the unpacker has something left in buffer - except Exception, err: - buff = "" - file = self.waiting_streams[message["to"]] - if buff: - read_bytes -= len(buff) - file.write(buff) - - if config.debug_socket: - self.log("Starting stream %s: %s bytes (%s from unpacker)" % (message["to"], message["stream_bytes"], len(buff))) - - try: - while 1: - if read_bytes <= 0: - break - buff = self.sock.recv(16 * 1024) - if not buff: - break - buff_len = len(buff) - read_bytes -= buff_len - file.write(buff) - - # Statistics - self.last_recv_time = time.time() - self.incomplete_buff_recv += 1 - self.bytes_recv += buff_len - self.server.bytes_recv += buff_len - except Exception, err: - self.log("Stream read error: %s" % Debug.formatException(err)) - - if config.debug_socket: - self.log("End stream %s" % message["to"]) - - self.incomplete_buff_recv = 0 - self.waiting_requests[message["to"]].set(message) # Set the response to event - del self.waiting_streams[message["to"]] - del self.waiting_requests[message["to"]] + self.close("Crypt connection error: Socket not encrypted, but certificate pin present") # Send data to connection def send(self, message, streaming=False): + self.last_send_time = time.time() if config.debug_socket: self.log("Send: %s, to: %s, streaming: %s, site: %s, inner_path: 
%s, req_id: %s" % ( message.get("cmd"), message.get("to"), streaming, message.get("params", {}).get("site"), message.get("params", {}).get("inner_path"), message.get("req_id")) ) - self.last_send_time = time.time() + + if not self.sock: + self.log("Send error: missing socket") + return False + + if not self.connected and message.get("cmd") != "handshake": + self.log("Wait for handshake before send request") + self.event_connected.get() + try: + stat_key = message.get("cmd", "unknown") + if stat_key == "response": + stat_key = "response: %s" % self.last_cmd_recv + else: + self.server.num_sent += 1 + + self.server.stat_sent[stat_key]["num"] += 1 if streaming: - bytes_sent = StreamingMsgpack.stream(message, self.sock.sendall) - message = None + with self.send_lock: + bytes_sent = StreamingMsgpack.stream(message, self.sock.sendall) self.bytes_sent += bytes_sent self.server.bytes_sent += bytes_sent + self.server.stat_sent[stat_key]["bytes"] += bytes_sent + message = None else: data = msgpack.packb(message) - message = None self.bytes_sent += len(data) self.server.bytes_sent += len(data) - self.sock.sendall(data) + self.server.stat_sent[stat_key]["bytes"] += len(data) + message = None + with self.send_lock: + self.sock.sendall(data) except Exception, err: - self.log("Send errror: %s" % Debug.formatException(err)) - self.close() + self.close("Send error: %s (cmd: %s)" % (err, stat_key)) return False self.last_sent_time = time.time() return True - # Stream raw file to connection + # Stream file to connection without msgpacking def sendRawfile(self, file, read_bytes): buff = 64 * 1024 bytes_left = read_bytes + bytes_sent = 0 while True: self.last_send_time = time.time() - self.sock.sendall( - file.read(min(bytes_left, buff)) - ) + data = file.read(min(bytes_left, buff)) + bytes_sent += len(data) + with self.send_lock: + self.sock.sendall(data) bytes_left -= buff if bytes_left <= 0: break - self.bytes_sent += read_bytes - self.server.bytes_sent += read_bytes + 
self.bytes_sent += bytes_sent + self.server.bytes_sent += bytes_sent + self.server.stat_sent["raw_file"]["num"] += 1 + self.server.stat_sent["raw_file"]["bytes"] += bytes_sent return True # Create and send a request to peer def request(self, cmd, params={}, stream_to=None): # Last command sent more than 10 sec ago, timeout if self.waiting_requests and self.protocol == "v2" and time.time() - max(self.last_req_time, self.last_recv_time) > 10: - self.log("Request %s timeout: %s" % (self.last_cmd, time.time() - self.last_send_time)) - self.close() + self.close("Request %s timeout: %.3fs" % (self.last_cmd_sent, time.time() - self.last_send_time)) return False self.last_req_time = time.time() - self.last_cmd = cmd + self.last_cmd_sent = cmd self.req_id += 1 data = {"cmd": cmd, "req_id": self.req_id, "params": params} event = gevent.event.AsyncResult() # Create new event for response - self.waiting_requests[self.req_id] = event + self.waiting_requests[self.req_id] = {"evt": event, "cmd": cmd} if stream_to: self.waiting_streams[self.req_id] = stream_to self.send(data) # Send request @@ -419,7 +587,7 @@ def ping(self): return False # Close connection - def close(self): + def close(self, reason="Unknown"): if self.closed: return False # Already closed self.closed = True @@ -427,13 +595,12 @@ def close(self): if self.event_connected: self.event_connected.set(False) - if config.debug_socket: - self.log( - "Closing connection, waiting_requests: %s, buff: %s..." % - (len(self.waiting_requests), self.incomplete_buff_recv) - ) + self.log( + "Closing connection: %s, waiting_requests: %s, sites: %s, buff: %s..." 
% + (reason, len(self.waiting_requests), self.sites, self.incomplete_buff_recv) + ) for request in self.waiting_requests.values(): # Mark pending requests failed - request.set(False) + request["evt"].set(False) self.waiting_requests = {} self.waiting_streams = {} self.sites = 0 diff --git a/src/Connection/ConnectionServer.py b/src/Connection/ConnectionServer.py index 9d5d859cb..15274a544 100644 --- a/src/Connection/ConnectionServer.py +++ b/src/Connection/ConnectionServer.py @@ -1,83 +1,125 @@ import logging import time import sys +import socket +from collections import defaultdict import gevent import msgpack from gevent.server import StreamServer from gevent.pool import Pool +import util +from util import helper from Debug import Debug from Connection import Connection from Config import config from Crypt import CryptConnection from Crypt import CryptHash from Tor import TorManager +from Site import SiteManager -class ConnectionServer: +class ConnectionServer(object): def __init__(self, ip=None, port=None, request_handler=None): + if not ip: + if config.fileserver_ip_type == "ipv6": + ip = "::1" + else: + ip = "127.0.0.1" + port = 15441 self.ip = ip self.port = port self.last_connection_id = 1 # Connection id incrementer self.log = logging.getLogger("ConnServer") - self.port_opened = None - - if config.tor != "disabled": - self.tor_manager = TorManager(self.ip, self.port) - else: - self.tor_manager = None + self.port_opened = {} + self.peer_blacklist = SiteManager.peer_blacklist + self.tor_manager = TorManager(self.ip, self.port) self.connections = [] # Connections - self.whitelist = ("127.0.0.1",) # No flood protection on this ips + self.whitelist = config.ip_local # No flood protection on this ips self.ip_incoming = {} # Incoming connections from ip in the last minute to avoid connection flood - self.broken_ssl_peer_ids = {} # Peerids of broken ssl connections + self.broken_ssl_ips = {} # Peerids of broken ssl connections self.ips = {} # Connection by ip 
self.has_internet = True # Internet outage detection - self.running = True - self.thread_checker = gevent.spawn(self.checkConnections) + self.stream_server = None + self.stream_server_proxy = None + self.running = False + self.stat_recv = defaultdict(lambda: defaultdict(int)) + self.stat_sent = defaultdict(lambda: defaultdict(int)) self.bytes_recv = 0 self.bytes_sent = 0 + self.num_recv = 0 + self.num_sent = 0 + + self.num_incoming = 0 + self.num_outgoing = 0 + self.had_external_incoming = False + + self.timecorrection = 0.0 + self.pool = Pool(500) # do not accept more than 500 connections # Bittorrent style peerid - self.peer_id = "-ZN0%s-%s" % (config.version.replace(".", ""), CryptHash.random(12, "base64")) + self.peer_id = "-UT3530-%s" % CryptHash.random(12, "base64") # Check msgpack version if msgpack.version[0] == 0 and msgpack.version[1] < 4: self.log.error( - "Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo apt-get install python-pip; sudo pip install msgpack-python --upgrade`" % + "Error: Unsupported msgpack version: %s (<0.4.0), please run `sudo apt-get install python-pip; sudo pip install msgpack --upgrade`" % str(msgpack.version) ) sys.exit(0) - if port: # Listen server on a port - self.pool = Pool(1000) # do not accept more than 1000 connections - self.stream_server = StreamServer( - (ip.replace("*", ""), port), self.handleIncomingConnection, spawn=self.pool, backlog=500 - ) - if request_handler: - self.handleRequest = request_handler + if request_handler: + self.handleRequest = request_handler - def start(self): + def start(self, check_connections=True): self.running = True + if check_connections: + self.thread_checker = gevent.spawn(self.checkConnections) CryptConnection.manager.loadCerts() + if config.tor != "disable": + self.tor_manager.start() + if not self.port: + self.log.info("No port found, not binding") + return False + self.log.debug("Binding to: %s:%s, (msgpack: %s), supported crypt: %s" % ( - self.ip, self.port, - 
".".join(map(str, msgpack.version)), CryptConnection.manager.crypt_supported) - ) + self.ip, self.port, ".".join(map(str, msgpack.version)), + CryptConnection.manager.crypt_supported + )) + try: + self.stream_server = StreamServer( + (self.ip, self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100 + ) + except Exception, err: + self.log.info("StreamServer create error: %s" % Debug.formatException(err)) + + def listen(self): + if self.stream_server_proxy: + gevent.spawn(self.listenProxy) try: - self.stream_server.serve_forever() # Start normal connection server + self.stream_server.serve_forever() except Exception, err: - self.log.info("StreamServer bind error, must be running already: %s" % err) + self.log.info("StreamServer listen error: %s" % err) def stop(self): + self.log.debug("Stopping") self.running = False - self.stream_server.stop() + if self.stream_server: + self.stream_server.stop() def handleIncomingConnection(self, sock, addr): - ip, port = addr + ip, port = addr[0:2] + ip = ip.lower() + if ip.startswith("::ffff:"): # IPv6 to IPv4 mapping + ip = ip.replace("::ffff:", "", 1) + self.num_incoming += 1 + + if not self.had_external_incoming and not helper.isPrivateIp(ip): + self.had_external_incoming = True # Connection flood protection if ip in self.ip_incoming and ip not in self.whitelist: @@ -92,12 +134,22 @@ def handleIncomingConnection(self, sock, addr): connection = Connection(self, ip, port, sock) self.connections.append(connection) - self.ips[ip] = connection + if ip not in config.ip_local: + self.ips[ip] = connection connection.handleIncomingConnection(sock) - def getConnection(self, ip=None, port=None, peer_id=None, create=True, site=None): - if ip.endswith(".onion") and self.tor_manager.start_onions and site: # Site-unique connection for Tor - key = ip + site.address + def handleMessage(self, *args, **kwargs): + pass + + def getConnection(self, ip=None, port=None, peer_id=None, create=True, site=None, 
is_tracker_connection=False): + ip_type = helper.getIpType(ip) + has_per_site_onion = (ip.endswith(".onion") or self.port_opened.get(ip_type, None) == False) and self.tor_manager.start_onions and site + if has_per_site_onion: # Site-unique connection for Tor + if ip.endswith(".onion"): + site_onion = self.tor_manager.getOnion(site.address) + else: + site_onion = self.tor_manager.getOnion("global") + key = ip + site_onion else: key = ip @@ -116,7 +168,7 @@ def getConnection(self, ip=None, port=None, peer_id=None, create=True, site=None if connection.ip == ip: if peer_id and connection.handshake.get("peer_id") != peer_id: # Does not match continue - if ip.endswith(".onion") and self.tor_manager.start_onions and connection.site_lock != site.address: + if ip.endswith(".onion") and self.tor_manager.start_onions and ip.replace(".onion", "") != connection.target_onion: # For different site continue if not connection.connected and create: @@ -129,34 +181,43 @@ def getConnection(self, ip=None, port=None, peer_id=None, create=True, site=None if create: # Allow to create new connection if not found if port == 0: raise Exception("This peer is not connectable") + + if (ip, port) in self.peer_blacklist and not is_tracker_connection: + raise Exception("This peer is blacklisted") + try: - if ip.endswith(".onion") and self.tor_manager.start_onions and site: # Lock connection to site - connection = Connection(self, ip, port, site_lock=site.address) + if has_per_site_onion: # Lock connection to site + connection = Connection(self, ip, port, target_onion=site_onion, is_tracker_connection=is_tracker_connection) else: - connection = Connection(self, ip, port) + connection = Connection(self, ip, port, is_tracker_connection=is_tracker_connection) + self.num_outgoing += 1 self.ips[key] = connection self.connections.append(connection) + connection.log("Connecting... 
(site: %s)" % site) succ = connection.connect() if not succ: - connection.close() + connection.close("Connection event return error") raise Exception("Connection event return error") except Exception, err: - self.log.debug("%s Connect error: %s" % (ip, Debug.formatException(err))) - connection.close() + connection.close("%s Connect error: %s" % (ip, Debug.formatException(err))) raise err + + if len(self.connections) > config.global_connected_limit: + gevent.spawn(self.checkMaxConnections) + return connection else: return None def removeConnection(self, connection): - self.log.debug("Removing %s..." % connection) # Delete if same as in registry if self.ips.get(connection.ip) == connection: del self.ips[connection.ip] # Site locked connection - if connection.site_lock and self.ips.get(connection.ip + connection.site_lock) == connection: - del self.ips[connection.ip + connection.site_lock] + if connection.target_onion: + if self.ips.get(connection.ip + connection.target_onion) == connection: + del self.ips[connection.ip + connection.target_onion] # Cert pinned connection if connection.cert_pin and self.ips.get(connection.ip + "#" + connection.cert_pin) == connection: del self.ips[connection.ip + "#" + connection.cert_pin] @@ -168,65 +229,72 @@ def checkConnections(self): run_i = 0 while self.running: run_i += 1 - time.sleep(60) # Check every minute + time.sleep(15) # Check every minute self.ip_incoming = {} # Reset connected ips counter - self.broken_ssl_peer_ids = {} # Reset broken ssl peerids count + self.broken_ssl_ips = {} # Reset broken ssl peerids count last_message_time = 0 + s = time.time() for connection in self.connections[:]: # Make a copy + if connection.ip.endswith(".onion") or config.tor == "always": + timeout_multipler = 2 + else: + timeout_multipler = 1 + idle = time.time() - max(connection.last_recv_time, connection.start_time, connection.last_message_time) - last_message_time = max(last_message_time, connection.last_message_time) + if 
connection.last_message_time > last_message_time and not connection.is_private_ip: + # Message from local IPs does not means internet connection + last_message_time = connection.last_message_time if connection.unpacker and idle > 30: # Delete the unpacker if not needed del connection.unpacker connection.unpacker = None - elif connection.last_cmd == "announce" and idle > 20: # Bootstrapper connection close after 20 sec - connection.log("[Cleanup] Tracker connection: %s" % idle) - connection.close() + elif connection.last_cmd_sent == "announce" and idle > 20: # Bootstrapper connection close after 20 sec + connection.close("[Cleanup] Tracker connection, idle: %.3fs" % idle) if idle > 60 * 60: # Wake up after 1h - connection.log("[Cleanup] After wakeup, idle: %s" % idle) - connection.close() + connection.close("[Cleanup] After wakeup, idle: %.3fs" % idle) elif idle > 20 * 60 and connection.last_send_time < time.time() - 10: # Idle more than 20 min and we have not sent request in last 10 sec if not connection.ping(): - connection.close() + connection.close("[Cleanup] Ping timeout") - elif idle > 10 and connection.incomplete_buff_recv > 0: + elif idle > 10 * timeout_multipler and connection.incomplete_buff_recv > 0: # Incomplete data with more than 10 sec idle - connection.log("[Cleanup] Connection buff stalled") - connection.close() + connection.close("[Cleanup] Connection buff stalled") - elif idle > 10 and connection.waiting_requests and time.time() - connection.last_send_time > 10: - # Sent command and no response in 10 sec - connection.log( - "[Cleanup] Command %s timeout: %s" % (connection.last_cmd, time.time() - connection.last_send_time) + elif idle > 10 * timeout_multipler and connection.protocol == "?": # No connection after 10 sec + connection.close( + "[Cleanup] Connect timeout: %.3fs" % idle ) - connection.close() - elif idle > 60 and connection.protocol == "?": # No connection after 1 min - connection.log("[Cleanup] Connect timeout: %s" % idle) - 
connection.close() + elif idle > 10 * timeout_multipler and connection.waiting_requests and time.time() - connection.last_send_time > 10 * timeout_multipler: + # Sent command and no response in 10 sec + connection.close( + "[Cleanup] Command %s timeout: %.3fs" % (connection.last_cmd_sent, time.time() - connection.last_send_time) + ) elif idle < 60 and connection.bad_actions > 40: - connection.log("[Cleanup] Too many bad actions: %s" % connection.bad_actions) - connection.close() + connection.close( + "[Cleanup] Too many bad actions: %s" % connection.bad_actions + ) - elif idle > 5*60 and connection.sites == 0: - connection.log("[Cleanup] No site for connection") - connection.close() + elif idle > 5 * 60 and connection.sites == 0: + connection.close( + "[Cleanup] No site for connection" + ) - elif run_i % 30 == 0: + elif run_i % 90 == 0: # Reset bad action counter every 30 min connection.bad_actions = 0 # Internet outage detection - if time.time() - last_message_time > max(60, 60*10/max(1,float(len(self.connections))/50)): + if time.time() - last_message_time > max(60, 60 * 10 / max(1, float(len(self.connections)) / 50)): # Offline: Last message more than 60-600sec depending on connection number - if self.has_internet: + if self.has_internet and last_message_time: self.has_internet = False self.onInternetOffline() else: @@ -235,8 +303,49 @@ def checkConnections(self): self.has_internet = True self.onInternetOnline() + self.timecorrection = self.getTimecorrection() + + if time.time() - s > 0.01: + self.log.debug("Connection cleanup in %.3fs" % (time.time() - s)) + self.log.debug("Checkconnections ended") + + @util.Noparallel(blocking=False) + def checkMaxConnections(self): + if len(self.connections) < config.global_connected_limit: + return 0 + + s = time.time() + num_connected_before = len(self.connections) + self.connections.sort(key=lambda connection: connection.sites) + num_closed = 0 + for connection in self.connections: + idle = time.time() - 
max(connection.last_recv_time, connection.start_time, connection.last_message_time) + if idle > 60: + connection.close("Connection limit reached") + num_closed += 1 + if num_closed > config.global_connected_limit * 0.1: + break + + self.log.debug("Closed %s connections of %s after reached limit %s in %.3fs" % ( + num_closed, num_connected_before, config.global_connected_limit, time.time() - s + )) + return num_closed + def onInternetOnline(self): self.log.info("Internet online") def onInternetOffline(self): + self.had_external_incoming = False self.log.info("Internet offline") + + def getTimecorrection(self): + corrections = sorted([ + connection.handshake.get("time") - connection.handshake_time + connection.last_ping_delay + for connection in self.connections + if connection.handshake.get("time") and connection.last_ping_delay + ]) + if len(corrections) < 6: + return 0.0 + mid = len(corrections) / 2 - 1 + median = (corrections[mid - 1] + corrections[mid] + corrections[mid + 1]) / 3 + return median diff --git a/src/Content/ContentDb.py b/src/Content/ContentDb.py index 83ab59ad7..307b47bb4 100644 --- a/src/Content/ContentDb.py +++ b/src/Content/ContentDb.py @@ -1,8 +1,10 @@ import time +import os from Db import Db from Config import config from Plugin import PluginManager +from Debug import Debug @PluginManager.acceptPlugins @@ -10,8 +12,21 @@ class ContentDb(Db): def __init__(self, path): Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, path) self.foreign_keys = True - self.schema = self.getSchema() - self.checkTables() + try: + self.schema = self.getSchema() + self.checkTables() + self.log.debug("Checking foreign keys...") + foreign_key_error = self.execute("PRAGMA foreign_key_check").fetchone() + if foreign_key_error: + raise Exception("Database foreign key error: %s" % foreign_key_error) + except Exception, err: + self.log.error("Error loading content.db: %s, rebuilding..." 
% Debug.formatException(err)) + self.close() + os.unlink(path) # Remove and try again + Db.__init__(self, {"db_name": "ContentDb", "tables": {}}, path) + self.foreign_keys = True + self.schema = self.getSchema() + self.checkTables() self.site_ids = {} self.sites = {} @@ -82,7 +97,7 @@ def setContent(self, site, inner_path, content, size=0): "size": size, "size_files": sum([val["size"] for key, val in content.get("files", {}).iteritems()]), "size_files_optional": sum([val["size"] for key, val in content.get("files_optional", {}).iteritems()]), - "modified": int(content["modified"]) + "modified": int(content.get("modified", 0)) }, { "site_id": self.site_ids.get(site.address, 0), "inner_path": inner_path @@ -107,21 +122,23 @@ def getTotalSize(self, site, ignore=None): params = {"site_id": self.site_ids.get(site.address, 0)} if ignore: params["not__inner_path"] = ignore - res = self.execute("SELECT SUM(size) + SUM(size_files) AS size FROM content WHERE ?", params) - return res.fetchone()["size"] + res = self.execute("SELECT SUM(size) + SUM(size_files) AS size, SUM(size_files_optional) AS size_optional FROM content WHERE ?", params) + row = dict(res.fetchone()) - def getOptionalSize(self, site): - res = self.execute( - "SELECT SUM(size_files_optional) AS size FROM content WHERE ?", - {"site_id": self.site_ids.get(site.address, 0)} - ) - return res.fetchone()["size"] + if not row["size"]: + row["size"] = 0 + if not row["size_optional"]: + row["size_optional"] = 0 - def listModified(self, site, since): - res = self.execute( - "SELECT inner_path, modified FROM content WHERE site_id = :site_id AND modified > :since", - {"site_id": self.site_ids.get(site.address, 0), "since": since} - ) + return row["size"], row["size_optional"] + + def listModified(self, site, after=None, before=None): + params = {"site_id": self.site_ids.get(site.address, 0)} + if after: + params["modified>"] = after + if before: + params["modified<"] = before + res = self.execute("SELECT inner_path, 
modified FROM content WHERE ?", params) return {row["inner_path"]: row["modified"] for row in res} content_dbs = {} diff --git a/src/Content/ContentDbDict.py b/src/Content/ContentDbDict.py index 513ee9d73..b47a15a3b 100644 --- a/src/Content/ContentDbDict.py +++ b/src/Content/ContentDbDict.py @@ -2,6 +2,9 @@ import os import ContentDb +from Debug import Debug +from Config import config + class ContentDbDict(dict): def __init__(self, site, *args, **kwargs): @@ -19,7 +22,10 @@ def loadItem(self, key): try: self.num_loaded += 1 if self.num_loaded % 100 == 0: - self.log.debug("Loaded json: %s (latest: %s)" % (self.num_loaded, key)) + if config.verbose: + self.log.debug("Loaded json: %s (latest: %s) called by: %s" % (self.num_loaded, key, Debug.formatStack())) + else: + self.log.debug("Loaded json: %s (latest: %s)" % (self.num_loaded, key)) content = self.site.storage.loadJson(key) dict.__setitem__(self, key, content) except IOError: @@ -35,9 +41,9 @@ def loadItem(self, key): def getItemSize(self, key): return self.site.storage.getSize(key) - # Only keep last 50 accessed json in memory + # Only keep last 10 accessed json in memory def checkLimit(self): - if len(self.cached_keys) > 50: + if len(self.cached_keys) > 10: key_deleted = self.cached_keys.pop(0) dict.__setitem__(self, key_deleted, False) @@ -73,7 +79,7 @@ def iteritems(self): for key in dict.keys(self): try: val = self[key] - except Exception, err: + except Exception as err: self.log.warning("Error loading %s: %s" % (key, err)) continue yield key, val @@ -83,7 +89,7 @@ def items(self): for key in dict.keys(self): try: val = self[key] - except Exception, err: + except Exception as err: self.log.warning("Error loading %s: %s" % (key, err)) continue back.append((key, val)) @@ -105,6 +111,11 @@ def get(self, key, default=None): return self.__getitem__(key) except KeyError: return default + except Exception as err: + self.site.bad_files[key] = self.site.bad_files.get(key, 1) + dict.__delitem__(self, key) + 
self.log.warning("Error loading %s: %s" % (key, err)) + return default def execute(self, query, params={}): params["site_id"] = self.db_id diff --git a/src/Content/ContentManager.py b/src/Content/ContentManager.py index 671851e3c..e2e2860a6 100644 --- a/src/Content/ContentManager.py +++ b/src/Content/ContentManager.py @@ -11,10 +11,21 @@ from Config import config from util import helper from util import Diff +from util import SafeRe from Peer import PeerHashfield from ContentDbDict import ContentDbDict +from Plugin import PluginManager +class VerifyError(Exception): + pass + + +class SignError(Exception): + pass + + +@PluginManager.acceptPlugins class ContentManager(object): def __init__(self, site): @@ -29,13 +40,13 @@ def loadContents(self): if len(self.contents) == 0: self.log.debug("ContentDb not initialized, load files from filesystem") self.loadContent(add_bad_files=False, delete_removed_files=False) - self.site.settings["size"] = self.getTotalSize() + self.site.settings["size"], self.site.settings["size_optional"] = self.getTotalSize() # Load hashfield cache if "hashfield" in self.site.settings.get("cache", {}): self.hashfield.fromstring(self.site.settings["cache"]["hashfield"].decode("base64")) del self.site.settings["cache"]["hashfield"] - elif self.contents.get("content.json") and self.getOptionalSize() > 0: + elif self.contents.get("content.json") and self.site.settings["size_optional"] > 0: self.site.storage.updateBadFiles() # No hashfield cache created yet self.has_optional_files = bool(self.hashfield) @@ -67,7 +78,7 @@ def loadContent(self, content_inner_path="content.json", add_bad_files=True, del self.log.warning("%s load error: %s" % (content_path, Debug.formatException(err))) return [], [] else: - self.log.warning("Content.json not exist: %s" % content_path) + self.log.debug("Content.json not exist: %s" % content_path) return [], [] # Content.json not exist try: @@ -100,11 +111,12 @@ def loadContent(self, content_inner_path="content.json", 
add_bad_files=True, del changed.append(file_inner_path) # Download new file elif old_hash != new_hash and self.hashfield.hasHash(old_hash) and not self.site.settings.get("own"): try: - self.optionalRemove(file_inner_path, old_hash, old_content["files_optional"][relative_path]["size"]) - self.site.storage.delete(file_inner_path) + old_hash_id = self.hashfield.getHashId(old_hash) + self.optionalRemoved(file_inner_path, old_hash_id, old_content["files_optional"][relative_path]["size"]) + self.optionalDelete(file_inner_path) self.log.debug("Deleted changed optional file: %s" % file_inner_path) except Exception, err: - self.log.debug("Error deleting file %s: %s" % (file_inner_path, err)) + self.log.debug("Error deleting file %s: %s" % (file_inner_path, Debug.formatException(err))) else: # The file is not in the old content if self.site.isDownloadable(file_inner_path): changed.append(file_inner_path) # Download new file @@ -127,17 +139,20 @@ def loadContent(self, content_inner_path="content.json", add_bad_files=True, del for file_relative_path in deleted: file_inner_path = content_inner_dir + file_relative_path try: - self.site.storage.delete(file_inner_path) # Check if the deleted file is optional if old_content.get("files_optional") and old_content["files_optional"].get(file_relative_path): + self.optionalDelete(file_inner_path) old_hash = old_content["files_optional"][file_relative_path].get("sha512") if self.hashfield.hasHash(old_hash): - self.optionalRemove(file_inner_path, old_hash, old_content["files_optional"][file_relative_path]["size"]) + old_hash_id = self.hashfield.getHashId(old_hash) + self.optionalRemoved(file_inner_path, old_hash_id, old_content["files_optional"][file_relative_path]["size"]) + else: + self.site.storage.delete(file_inner_path) self.log.debug("Deleted file: %s" % file_inner_path) except Exception, err: - self.log.debug("Error deleting file %s: %s" % (file_inner_path, err)) + self.log.debug("Error deleting file %s: %s" % (file_inner_path, 
Debug.formatException(err))) # Cleanup empty dirs tree = {root: [dirs, files] for root, dirs, files in os.walk(self.site.storage.getPath(content_inner_dir))} @@ -157,7 +172,7 @@ def loadContent(self, content_inner_path="content.json", add_bad_files=True, del if old_content and "user_contents" in new_content and "archived" in new_content["user_contents"]: old_archived = old_content.get("user_contents", {}).get("archived", {}) new_archived = new_content.get("user_contents", {}).get("archived", {}) - self.log.debug("old archived: %s, new archived: %s" % (old_archived, new_archived)) + self.log.debug("old archived: %s, new archived: %s" % (len(old_archived), len(new_archived))) archived_changed = { key: date_archived for key, date_archived in new_archived.iteritems() @@ -169,6 +184,36 @@ def loadContent(self, content_inner_path="content.json", add_bad_files=True, del archived_inner_path = content_inner_dir + archived_dirname + "/content.json" if self.contents.get(archived_inner_path, {}).get("modified", 0) < date_archived: self.removeContent(archived_inner_path) + deleted += archived_inner_path + self.site.settings["size"], self.site.settings["size_optional"] = self.getTotalSize() + + # Check archived before + if old_content and "user_contents" in new_content and "archived_before" in new_content["user_contents"]: + old_archived_before = old_content.get("user_contents", {}).get("archived_before", 0) + new_archived_before = new_content.get("user_contents", {}).get("archived_before", 0) + if old_archived_before != new_archived_before: + self.log.debug("Archived before changed: %s -> %s" % (old_archived_before, new_archived_before)) + + # Remove downloaded archived files + num_removed_contents = 0 + for archived_inner_path in self.listModified(before=new_archived_before): + if archived_inner_path.startswith(content_inner_dir) and archived_inner_path != content_inner_path: + self.removeContent(archived_inner_path) + num_removed_contents += 1 + self.site.settings["size"], 
self.site.settings["size_optional"] = self.getTotalSize() + + # Remove archived files from download queue + num_removed_bad_files = 0 + for bad_file in self.site.bad_files.keys(): + if bad_file.endswith("content.json"): + del self.site.bad_files[bad_file] + num_removed_bad_files += 1 + + if num_removed_bad_files > 0: + self.site.worker_manager.removeSolvedFileTasks(mark_as_good=False) + gevent.spawn(self.site.update, since=0) + + self.log.debug("Archived removed contents: %s, removed bad files: %s" % (num_removed_contents, num_removed_bad_files)) # Load includes if load_includes and "includes" in new_content: @@ -211,7 +256,7 @@ def loadContent(self, content_inner_path="content.json", add_bad_files=True, del # Update the content self.contents[content_inner_path] = new_content except Exception, err: - self.log.warning("Content.json parse error: %s" % Debug.formatException(err)) + self.log.warning("%s parse error: %s" % (content_inner_path, Debug.formatException(err))) return [], [] # Content.json parse error # Add changed files to bad files @@ -221,6 +266,7 @@ def loadContent(self, content_inner_path="content.json", add_bad_files=True, del for inner_path in deleted: if inner_path in self.site.bad_files: del self.site.bad_files[inner_path] + self.site.worker_manager.removeSolvedFileTasks() if new_content.get("modified", 0) > self.site.settings.get("modified", 0): # Dont store modifications in the far future (more than 10 minute) @@ -261,23 +307,14 @@ def removeContent(self, inner_path): # Get total size of site # Return: 32819 (size of files in kb) def getTotalSize(self, ignore=None): - size = self.contents.db.getTotalSize(self.site, ignore) - if size: - return size - else: - return 0 - - def getOptionalSize(self): - size = self.contents.db.getOptionalSize(self.site) - if size: - return size - else: - return 0 + return self.contents.db.getTotalSize(self.site, ignore) - def listModified(self, since): - return self.contents.db.listModified(self.site, since) + def 
listModified(self, after=None, before=None): + return self.contents.db.listModified(self.site, after=after, before=before) def listContents(self, inner_path="content.json", user_files=False): + if inner_path not in self.contents: + return [] back = [inner_path] content_inner_dir = helper.getDirname(inner_path) for relative_path in self.contents[inner_path].get("includes", {}).keys(): @@ -287,24 +324,60 @@ def listContents(self, inner_path="content.json", user_files=False): # Returns if file with the given modification date is archived or not def isArchived(self, inner_path, modified): - file_info = self.getFileInfo(inner_path) - match = re.match(".*/(.*?)/", inner_path) + match = re.match("(.*)/(.*?)/", inner_path) if not match: return False - relative_directory = match.group(1) - if file_info and file_info.get("archived", {}).get(relative_directory) >= modified: - return True + user_contents_inner_path = match.group(1) + "/content.json" + relative_directory = match.group(2) + + file_info = self.getFileInfo(user_contents_inner_path) + if file_info: + time_archived_before = file_info.get("archived_before", 0) + time_directory_archived = file_info.get("archived", {}).get(relative_directory) + if modified <= time_archived_before or modified <= time_directory_archived: + return True + else: + return False else: return False + def isDownloaded(self, inner_path, hash_id=None): + if not hash_id: + file_info = self.getFileInfo(inner_path) + if not file_info or "sha512" not in file_info: + return False + hash_id = self.hashfield.getHashId(file_info["sha512"]) + return hash_id in self.hashfield + + # Is modified since signing + def isModified(self, inner_path): + s = time.time() + if inner_path.endswith("content.json"): + try: + is_valid = self.verifyFile(inner_path, self.site.storage.open(inner_path), ignore_same=False) + if is_valid: + is_modified = False + else: + is_modified = True + except VerifyError: + is_modified = True + else: + try: + self.verifyFile(inner_path, 
self.site.storage.open(inner_path), ignore_same=False) + is_modified = False + except VerifyError: + is_modified = True + return is_modified + # Find the file info line from self.contents # Return: { "sha512": "c29d73d...21f518", "size": 41 , "content_inner_path": "content.json"} - def getFileInfo(self, inner_path): + def getFileInfo(self, inner_path, new_file=False): dirs = inner_path.split("/") # Parent dirs of content.json inner_path_parts = [dirs.pop()] # Filename relative to content.json while True: content_inner_path = "%s/content.json" % "/".join(dirs) - content = self.contents.get(content_inner_path.strip("/")) + content_inner_path = content_inner_path.strip("/") + content = self.contents.get(content_inner_path) # Check in files if content and "files" in content: @@ -312,6 +385,7 @@ def getFileInfo(self, inner_path): if back: back["content_inner_path"] = content_inner_path back["optional"] = False + back["relative_path"] = "/".join(inner_path_parts) return back # Check in optional files @@ -320,13 +394,28 @@ def getFileInfo(self, inner_path): if back: back["content_inner_path"] = content_inner_path back["optional"] = True + back["relative_path"] = "/".join(inner_path_parts) return back # Return the rules if user dir if content and "user_contents" in content: back = content["user_contents"] - # Content.json is in the users dir - back["content_inner_path"] = re.sub("(.*)/.*?$", "\\1/content.json", inner_path) + content_inner_path_dir = helper.getDirname(content_inner_path) + relative_content_path = inner_path[len(content_inner_path_dir):] + user_auth_address_match = re.match("([A-Za-z0-9]+)/.*", relative_content_path) + if user_auth_address_match: + user_auth_address = user_auth_address_match.group(1) + back["content_inner_path"] = "%s%s/content.json" % (content_inner_path_dir, user_auth_address) + else: + back["content_inner_path"] = content_inner_path_dir + "content.json" + back["optional"] = None + back["relative_path"] = "/".join(inner_path_parts) + 
return back + + if new_file and content: + back = {} + back["content_inner_path"] = content_inner_path + back["relative_path"] = "/".join(inner_path_parts) back["optional"] = None return back @@ -347,6 +436,12 @@ def getRules(self, inner_path, content=None): if not file_info: return False # File not found inner_path = file_info["content_inner_path"] + + if inner_path == "content.json": # Root content.json + rules = {} + rules["signers"] = self.getValidSigners(inner_path, content) + return rules + dirs = inner_path.split("/") # Parent dirs of content.json inner_path_parts = [dirs.pop()] # Filename relative to content.json inner_path_parts.insert(0, dirs.pop()) # Dont check in self dir @@ -369,7 +464,13 @@ def getRules(self, inner_path, content=None): # Return: The rules of the file or False if not allowed def getUserContentRules(self, parent_content, inner_path, content): user_contents = parent_content["user_contents"] - user_address = re.match(".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1) # Delivered for directory + + # Delivered for directory + if "inner_path" in parent_content: + parent_content_dir = helper.getDirname(parent_content["inner_path"]) + user_address = re.match("([A-Za-z0-9]*?)/", inner_path[len(parent_content_dir):]).group(1) + else: + user_address = re.match(".*/([A-Za-z0-9]*?)/.*?$", inner_path).group(1) try: if not content: @@ -380,7 +481,11 @@ def getUserContentRules(self, parent_content, inner_path, content): user_urn = "n-a/n-a" cert_user_id = "n-a" - rules = copy.copy(user_contents["permissions"].get(cert_user_id, {})) # Default rules by username + if user_address in user_contents["permissions"]: + rules = copy.copy(user_contents["permissions"].get(user_address, {})) # Default rules based on address + else: + rules = copy.copy(user_contents["permissions"].get(cert_user_id, {})) # Default rules based on username + if rules is False: banned = True rules = {} @@ -389,7 +494,7 @@ def getUserContentRules(self, parent_content, inner_path, 
content): if "signers" in rules: rules["signers"] = rules["signers"][:] # Make copy of the signers for permission_pattern, permission_rules in user_contents["permission_rules"].items(): # Regexp rules - if not re.match(permission_pattern, user_urn): + if not SafeRe.match(permission_pattern, user_urn): continue # Rule is not valid for user # Update rules if its better than current recorded ones for key, val in permission_rules.iteritems(): @@ -407,7 +512,10 @@ def getUserContentRules(self, parent_content, inner_path, content): elif type(val) is list: # List, append rules[key] += val - rules["cert_signers"] = user_contents["cert_signers"] # Add valid cert signers + # Accepted cert signers + rules["cert_signers"] = user_contents.get("cert_signers", {}) + rules["cert_signers_pattern"] = user_contents.get("cert_signers_pattern") + if "signers" not in rules: rules["signers"] = [] @@ -421,7 +529,7 @@ def getUserContentRules(self, parent_content, inner_path, content): # Get diffs for changed files def getDiffs(self, inner_path, limit=30 * 1024, update_files=True): if inner_path not in self.contents: - return None + return {} diffs = {} content_inner_path_dir = helper.getDirname(inner_path) for file_relative_path in self.contents[inner_path].get("files", {}): @@ -445,60 +553,94 @@ def getDiffs(self, inner_path, limit=30 * 1024, update_files=True): self.site.storage.delete(file_inner_path + "-old") return diffs + def hashFile(self, dir_inner_path, file_relative_path, optional=False): + back = {} + file_inner_path = dir_inner_path + "/" + file_relative_path + + file_path = self.site.storage.getPath(file_inner_path) + file_size = os.path.getsize(file_path) + sha512sum = CryptHash.sha512sum(file_path) # Calculate sha512 sum of file + if optional and not self.hashfield.hasHash(sha512sum): + self.optionalDownloaded(file_inner_path, self.hashfield.getHashId(sha512sum), file_size, own=True) + + back[file_relative_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)} + 
return back + + def isValidRelativePath(self, relative_path): + if ".." in relative_path: + return False + elif len(relative_path) > 255: + return False + else: + return re.match("^[a-z\[\]\(\) A-Z0-9~_@=\.\+-/]+$", relative_path) + + def sanitizePath(self, inner_path): + return re.sub("[^a-z\[\]\(\) A-Z0-9_@=\.\+-/]", "", inner_path) + # Hash files in directory def hashFiles(self, dir_inner_path, ignore_pattern=None, optional_pattern=None): files_node = {} files_optional_node = {} - if not re.match("^[a-zA-Z0-9_@=\.\+-/]*$", dir_inner_path): + if dir_inner_path and not self.isValidRelativePath(dir_inner_path): ignored = True self.log.error("- [ERROR] Only ascii encoded directories allowed: %s" % dir_inner_path) - for file_relative_path in self.site.storage.list(dir_inner_path): + for file_relative_path in self.site.storage.walk(dir_inner_path, ignore_pattern): file_name = helper.getFilename(file_relative_path) ignored = optional = False if file_name == "content.json": ignored = True - elif ignore_pattern and re.match(ignore_pattern, file_relative_path): - ignored = True elif file_name.startswith(".") or file_name.endswith("-old") or file_name.endswith("-new"): ignored = True - elif not re.match("^[a-zA-Z0-9_@=\.\+\-/]+$", file_relative_path): + elif not self.isValidRelativePath(file_relative_path): + ignored = True + self.log.error("- [ERROR] Invalid filename: %s" % file_relative_path) + elif dir_inner_path == "" and file_relative_path == self.site.storage.getDbFile(): ignored = True - self.log.error("- [ERROR] Only ascii encoded filenames allowed: %s" % file_relative_path) - elif optional_pattern and re.match(optional_pattern, file_relative_path): + elif optional_pattern and SafeRe.match(optional_pattern, file_relative_path): optional = True if ignored: # Ignore content.json, defined regexp and files starting with . 
self.log.info("- [SKIPPED] %s" % file_relative_path) else: - file_inner_path = dir_inner_path + "/" + file_relative_path - file_path = self.site.storage.getPath(file_inner_path) - sha512sum = CryptHash.sha512sum(file_path) # Calculate sha512 sum of file if optional: - self.log.info("- [OPTIONAL] %s (SHA512: %s)" % (file_relative_path, sha512sum)) - file_size = os.path.getsize(file_path) - files_optional_node[file_relative_path] = {"sha512": sha512sum, "size": file_size} - if not self.hashfield.hasHash(sha512sum): - self.optionalDownloaded(file_inner_path, sha512sum, file_size, own=True) + self.log.info("- [OPTIONAL] %s" % file_relative_path) + files_optional_node.update( + self.hashFile(dir_inner_path, file_relative_path, optional=True) + ) else: - self.log.info("- %s (SHA512: %s)" % (file_relative_path, sha512sum)) - files_node[file_relative_path] = {"sha512": sha512sum, "size": os.path.getsize(file_path)} + self.log.info("- %s" % file_relative_path) + files_node.update( + self.hashFile(dir_inner_path, file_relative_path) + ) return files_node, files_optional_node # Create and sign a content.json # Return: The new content if filewrite = False - def sign(self, inner_path="content.json", privatekey=None, filewrite=True, update_changed_files=False, extend=None): + def sign(self, inner_path="content.json", privatekey=None, filewrite=True, update_changed_files=False, extend=None, remove_missing_optional=False): + if not inner_path.endswith("content.json"): + raise SignError("Invalid file name, you can only sign content.json files") + if inner_path in self.contents: - content = self.contents[inner_path] - if self.contents[inner_path].get("cert_sign", False) is None and self.site.storage.isFile(inner_path): + content = self.contents.get(inner_path) + if content and content.get("cert_sign", False) is None and self.site.storage.isFile(inner_path): # Recover cert_sign from file content["cert_sign"] = self.site.storage.loadJson(inner_path).get("cert_sign") else: content = 
None if not content: # Content not exist yet, load default one self.log.info("File %s not exist yet, loading default values..." % inner_path) - content = {"files": {}, "signs": {}} # Default content.json + + if self.site.storage.isFile(inner_path): + content = self.site.storage.loadJson(inner_path) + if "files" not in content: + content["files"] = {} + if "signs" not in content: + content["signs"] = {} + else: + content = {"files": {}, "signs": {}} # Default content.json + if inner_path == "content.json": # It's the root content.json, add some more fields content["title"] = "%s - ZeroNet_" % self.site.address content["description"] = "" @@ -508,8 +650,9 @@ def sign(self, inner_path="content.json", privatekey=None, filewrite=True, updat if extend: # Add extend keys if not exists for key, val in extend.items(): - if key not in content: + if not content.get(key): content[key] = val + self.log.info("Extending content.json with: %s" % key) directory = helper.getDirname(self.site.storage.getPath(inner_path)) inner_directory = helper.getDirname(inner_path) @@ -520,6 +663,11 @@ def sign(self, inner_path="content.json", privatekey=None, filewrite=True, updat helper.getDirname(inner_path), content.get("ignore"), content.get("optional") ) + if not remove_missing_optional: + for file_inner_path, file_details in content.get("files_optional", {}).iteritems(): + if file_inner_path not in files_optional_node: + files_optional_node[file_inner_path] = file_details + # Find changed files files_merged = files_node.copy() files_merged.update(files_optional_node) @@ -544,7 +692,7 @@ def sign(self, inner_path="content.json", privatekey=None, filewrite=True, updat elif "files_optional" in new_content: del new_content["files_optional"] - new_content["modified"] = time.time() # Add timestamp + new_content["modified"] = int(time.time()) # Add timestamp if inner_path == "content.json": new_content["zeronet_version"] = config.version new_content["signs_required"] = 
content.get("signs_required", 1) @@ -558,7 +706,7 @@ def sign(self, inner_path="content.json", privatekey=None, filewrite=True, updat privatekey_address = CryptBitcoin.privatekeyToAddress(privatekey) valid_signers = self.getValidSigners(inner_path, new_content) if privatekey_address not in valid_signers: - return self.log.error( + raise SignError( "Private key invalid! Valid signers: %s, Private key address: %s" % (valid_signers, privatekey_address) ) @@ -566,9 +714,8 @@ def sign(self, inner_path="content.json", privatekey=None, filewrite=True, updat if inner_path == "content.json" and privatekey_address == self.site.address: # If signing using the root key, then sign the valid signers - new_content["signers_sign"] = CryptBitcoin.sign( - "%s:%s" % (new_content["signs_required"], ",".join(valid_signers)), privatekey - ) + signers_data = "%s:%s" % (new_content["signs_required"], ",".join(valid_signers)) + new_content["signers_sign"] = CryptBitcoin.sign(str(signers_data), privatekey) if not new_content["signers_sign"]: self.log.info("Old style address, signers_sign is none") @@ -586,13 +733,7 @@ def sign(self, inner_path="content.json", privatekey=None, filewrite=True, updat new_content["signs"] = {} new_content["signs"][privatekey_address] = sign - if inner_path == "content.json": # To root content.json add old format sign for backward compatibility - oldsign_content = json.dumps(new_content, sort_keys=True) - new_content["sign"] = CryptBitcoin.signOld(oldsign_content, privatekey) - - if not self.verifyContent(inner_path, new_content): - self.log.error("Sign failed: Invalid content") - return False + self.verifyContent(inner_path, new_content) if filewrite: self.log.info("Saving to %s..." 
% inner_path) @@ -612,7 +753,7 @@ def getValidSigners(self, inner_path, content=None): valid_signers = [] if inner_path == "content.json": # Root content.json if "content.json" in self.contents and "signers" in self.contents["content.json"]: - valid_signers += self.contents["content.json"]["signers"].keys() + valid_signers += self.contents["content.json"]["signers"][:] else: rules = self.getRules(inner_path, content) if rules and "signers" in rules: @@ -631,22 +772,37 @@ def verifyCert(self, inner_path, content): rules = self.getRules(inner_path, content) - if not rules.get("cert_signers"): + if not rules: + raise VerifyError("No rules for this file") + + if not rules.get("cert_signers") and not rules.get("cert_signers_pattern"): return True # Does not need cert - name, domain = content["cert_user_id"].split("@") + if "cert_user_id" not in content: + raise VerifyError("Missing cert_user_id") + + if content["cert_user_id"].count("@") != 1: + raise VerifyError("Invalid domain in cert_user_id") + + name, domain = content["cert_user_id"].rsplit("@", 1) cert_address = rules["cert_signers"].get(domain) - if not cert_address: # Cert signer not allowed - self.log.warning("Invalid cert signer: %s" % domain) - return False - return CryptBitcoin.verify( - "%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name), cert_address, content["cert_sign"] - ) + if not cert_address: # Unknown Cert signer + if rules.get("cert_signers_pattern") and SafeRe.match(rules["cert_signers_pattern"], domain): + cert_address = domain + else: + raise VerifyError("Invalid cert signer: %s" % domain) + + try: + cert_subject = "%s#%s/%s" % (rules["user_address"], content["cert_auth_type"], name) + result = CryptBitcoin.verify(cert_subject, cert_address, content["cert_sign"]) + except Exception, err: + raise VerifyError("Certificate verify error: %s" % err) + return result # Checks if the content.json content is valid # Return: True or False def verifyContent(self, inner_path, content): - 
content_size = len(json.dumps(content, indent=1)) + sum([file["size"] for file in content["files"].values()]) # Size of new content + content_size = len(json.dumps(content, indent=1)) + sum([file["size"] for file in content["files"].values() if file["size"] >= 0]) # Size of new content # Calculate old content size old_content = self.contents.get(inner_path) if old_content: @@ -656,7 +812,11 @@ def verifyContent(self, inner_path, content): old_content_size = 0 old_content_size_optional = 0 - content_size_optional = sum([file["size"] for file in content.get("files_optional", {}).values()]) + # Reset site site on first content.json + if not old_content and inner_path == "content.json": + self.site.settings["size"] = 0 + + content_size_optional = sum([file["size"] for file in content.get("files_optional", {}).values() if file["size"] >= 0]) site_size = self.site.settings["size"] - old_content_size + content_size # Site size without old content plus the new site_size_optional = self.site.settings["size_optional"] - old_content_size_optional + content_size_optional # Site size without old content plus the new @@ -664,69 +824,70 @@ def verifyContent(self, inner_path, content): # Check site address if content.get("address") and content["address"] != self.site.address: - self.log.warning("%s: Wrong site address: %s != %s" % (inner_path, content["address"], self.site.address)) - return False + raise VerifyError("Wrong site address: %s != %s" % (content["address"], self.site.address)) # Check file inner path if content.get("inner_path") and content["inner_path"] != inner_path: - self.log.warning("%s: Wrong inner_path: %s" % (inner_path, content["inner_path"])) - return False + raise VerifyError("Wrong inner_path: %s" % content["inner_path"]) # Check total site size limit if site_size > site_size_limit: - self.log.warning("%s: Site too large %s > %s, aborting task..." 
% (inner_path, site_size, site_size_limit)) if inner_path == "content.json" and self.site.settings["size"] == 0: # First content.json download, save site size to display warning self.site.settings["size"] = site_size task = self.site.worker_manager.findTask(inner_path) if task: # Dont try to download from other peers self.site.worker_manager.failTask(task) - return False + raise VerifyError("Content too large %sB > %sB, aborting task..." % (site_size, site_size_limit)) + + # Verify valid filenames + for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys(): + if not self.isValidRelativePath(file_relative_path): + raise VerifyError("Invalid relative path: %s" % file_relative_path) if inner_path == "content.json": self.site.settings["size"] = site_size self.site.settings["size_optional"] = site_size_optional return True # Root content.json is passed + else: + if self.verifyContentInclude(inner_path, content, content_size, content_size_optional): + self.site.settings["size"] = site_size + self.site.settings["size_optional"] = site_size_optional + return True + else: + return False + def verifyContentInclude(self, inner_path, content, content_size, content_size_optional): # Load include details rules = self.getRules(inner_path, content) if not rules: - self.log.warning("%s: No rules" % inner_path) - return False + raise VerifyError("No rules") # Check include size limit if rules.get("max_size") is not None: # Include size limit if content_size > rules["max_size"]: - self.log.warning("%s: Include too large %s > %s" % (inner_path, content_size, rules["max_size"])) - return False + raise VerifyError("Include too large %sB > %sB" % (content_size, rules["max_size"])) if rules.get("max_size_optional") is not None: # Include optional files limit if content_size_optional > rules["max_size_optional"]: - self.log.warning("%s: Include optional files too large %s > %s" % ( - inner_path, content_size_optional, rules["max_size_optional"]) + 
raise VerifyError("Include optional files too large %sB > %sB" % ( + content_size_optional, rules["max_size_optional"]) ) - return False # Filename limit if rules.get("files_allowed"): for file_inner_path in content["files"].keys(): - if not re.match("^%s$" % rules["files_allowed"], file_inner_path): - self.log.warning("%s %s: File not allowed" % (inner_path, file_inner_path)) - return False + if not SafeRe.match("^%s$" % rules["files_allowed"], file_inner_path): + raise VerifyError("File not allowed: %s" % file_inner_path) if rules.get("files_allowed_optional"): for file_inner_path in content.get("files_optional", {}).keys(): - if not re.match("^%s$" % rules["files_allowed_optional"], file_inner_path): - self.log.warning("%s %s: Optional file not allowed" % (inner_path, file_inner_path)) - return False + if not SafeRe.match("^%s$" % rules["files_allowed_optional"], file_inner_path): + raise VerifyError("Optional file not allowed: %s" % file_inner_path) # Check if content includes allowed if rules.get("includes_allowed") is False and content.get("includes"): - self.log.warning("%s: Includes not allowed" % inner_path) - return False # Includes not allowed - - self.site.settings["size"] = site_size - self.site.settings["size_optional"] = site_size_optional + raise VerifyError("Includes not allowed") return True # All good @@ -746,18 +907,16 @@ def verifyFile(self, inner_path, file, ignore_same=True): if old_content["modified"] == new_content["modified"] and ignore_same: # Ignore, have the same content.json return None elif old_content["modified"] > new_content["modified"]: # We have newer - self.log.debug( - "We have newer %s (Our: %s, Sent: %s)" % - (inner_path, old_content["modified"], new_content["modified"]) + raise VerifyError( + "We have newer (Our: %s, Sent: %s)" % + (old_content["modified"], new_content["modified"]) ) - # gevent.spawn(self.site.publish, inner_path=inner_path) # Try to fix the broken peers - return False if new_content["modified"] > 
time.time() + 60 * 60 * 24: # Content modified in the far future (allow 1 day+) - self.log.warning("%s modify is in the future!" % inner_path) - return False + raise VerifyError("Modify timestamp is in the far future!") if self.isArchived(inner_path, new_content["modified"]): - self.log.warning("%s this file is archived!" % inner_path) - return False + if inner_path in self.site.bad_files: + del self.site.bad_files[inner_path] + raise VerifyError("This file is archived!") # Check sign sign = new_content.get("sign") signs = new_content.get("signs", {}) @@ -765,25 +924,29 @@ def verifyFile(self, inner_path, file, ignore_same=True): del(new_content["sign"]) # The file signed without the sign if "signs" in new_content: del(new_content["signs"]) # The file signed without the signs + sign_content = json.dumps(new_content, sort_keys=True) # Dump the json to string to remove whitepsace - if not self.verifyContent(inner_path, new_content): - return False # Content not valid (files too large, invalid files) + # Fix float representation error on Android + modified = new_content["modified"] + if config.fix_float_decimals and type(modified) is float and not str(modified).endswith(".0"): + modified_fixed = "{:.6f}".format(modified).strip("0.") + sign_content = sign_content.replace( + '"modified": %s' % repr(modified), + '"modified": %s' % modified_fixed + ) if signs: # New style signing valid_signers = self.getValidSigners(inner_path, new_content) signs_required = self.getSignsRequired(inner_path, new_content) if inner_path == "content.json" and len(valid_signers) > 1: # Check signers_sign on root content.json - if not CryptBitcoin.verify( - "%s:%s" % (signs_required, ",".join(valid_signers)), self.site.address, new_content["signers_sign"] - ): - self.log.warning("%s invalid signers_sign!" 
% inner_path) - return False + signers_data = "%s:%s" % (signs_required, ",".join(valid_signers)) + if not CryptBitcoin.verify(signers_data, self.site.address, new_content["signers_sign"]): + raise VerifyError("Invalid signers_sign!") if inner_path != "content.json" and not self.verifyCert(inner_path, new_content): # Check if cert valid - self.log.warning("%s invalid cert!" % inner_path) - return False + raise VerifyError("Invalid cert!") valid_signs = 0 for address in valid_signers: @@ -791,105 +954,52 @@ def verifyFile(self, inner_path, file, ignore_same=True): valid_signs += CryptBitcoin.verify(sign_content, address, signs[address]) if valid_signs >= signs_required: break # Break if we has enough signs - if config.verbose: - self.log.debug("%s: Valid signs: %s/%s" % (inner_path, valid_signs, signs_required)) - return valid_signs >= signs_required + if valid_signs < signs_required: + raise VerifyError("Valid signs: %s/%s" % (valid_signs, signs_required)) + else: + return self.verifyContent(inner_path, new_content) else: # Old style signing - return CryptBitcoin.verify(sign_content, self.site.address, sign) + if CryptBitcoin.verify(sign_content, self.site.address, sign): + return self.verifyContent(inner_path, new_content) + else: + raise VerifyError("Invalid old-style sign") except Exception, err: - self.log.warning("Verify sign error: %s" % Debug.formatException(err)) - return False + self.log.warning("%s: verify sign error: %s" % (inner_path, Debug.formatException(err))) + raise err else: # Check using sha512 hash file_info = self.getFileInfo(inner_path) if file_info: - if "sha512" in file_info: - hash_valid = CryptHash.sha512sum(file) == file_info["sha512"] - elif "sha1" in file_info: # Backward compatibility - hash_valid = CryptHash.sha1sum(file) == file_info["sha1"] - else: - hash_valid = False + if CryptHash.sha512sum(file) != file_info.get("sha512", ""): + raise VerifyError("Invalid hash") + if file_info.get("size", 0) != file.tell(): - self.log.warning( - 
"%s file size does not match %s <> %s, Hash: %s" % - (inner_path, file.tell(), file_info.get("size", 0), hash_valid) + raise VerifyError( + "File size does not match %s <> %s" % + (inner_path, file.tell(), file_info.get("size", 0)) ) - return False - return hash_valid + + return True else: # File not in content.json - self.log.warning("File not in content.json: %s" % inner_path) - return False + raise VerifyError("File not in content.json") + + def optionalDelete(self, inner_path): + self.site.storage.delete(inner_path) - def optionalDownloaded(self, inner_path, hash, size=None, own=False): + def optionalDownloaded(self, inner_path, hash_id, size=None, own=False): if size is None: size = self.site.storage.getSize(inner_path) - if type(hash) is int: - done = self.hashfield.appendHashId(hash) - else: - done = self.hashfield.appendHash(hash) + + done = self.hashfield.appendHashId(hash_id) self.site.settings["optional_downloaded"] += size return done - def optionalRemove(self, inner_path, hash, size=None): + def optionalRemoved(self, inner_path, hash_id, size=None): if size is None: size = self.site.storage.getSize(inner_path) - if type(hash) is int: - done = self.hashfield.removeHashId(hash) - else: - done = self.hashfield.removeHash(hash) + done = self.hashfield.removeHashId(hash_id) + self.site.settings["optional_downloaded"] -= size return done - - -if __name__ == "__main__": - def testSign(): - global config - from Site import Site - site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH") - content_manager = ContentManager(site) - content_manager.sign( - "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", "5JCGE6UUruhfmAfcZ2GYjvrswkaiq7uLo6Gmtf2ep2Jh2jtNzWR" - ) - - def testVerify(): - from Site import Site - site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH") - - content_manager = ContentManager(site) - print "Loaded contents:", content_manager.contents.keys() - - file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json")) - 
print "content.json valid:", content_manager.verifyFile( - "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json", file, ignore_same=False - ) - - file = open(site.storage.getPath("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json")) - print "messages.json valid:", content_manager.verifyFile( - "data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/messages.json", file, ignore_same=False - ) - - def testInfo(): - from Site import Site - site = Site("12Hw8rTgzrNo4DSh2AkqwPRqDyTticwJyH") - - content_manager = ContentManager(site) - print content_manager.contents.keys() - - print content_manager.getFileInfo("index.html") - print content_manager.getIncludeInfo("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json") - print content_manager.getValidSigners("data/users/1KRxE1s3oDyNDawuYWpzbLUwNm8oDbeEp6/content.json") - print content_manager.getValidSigners("data/users/content.json") - print content_manager.getValidSigners("content.json") - - import sys - import logging - os.chdir("../..") - sys.path.insert(0, os.path.abspath(".")) - sys.path.insert(0, os.path.abspath("src")) - logging.basicConfig(level=logging.DEBUG) - - # testSign() - testVerify() - # testInfo() diff --git a/src/Crypt/CryptBitcoin.py b/src/Crypt/CryptBitcoin.py index 15d76d5fe..91ddb10ac 100644 --- a/src/Crypt/CryptBitcoin.py +++ b/src/Crypt/CryptBitcoin.py @@ -58,6 +58,9 @@ def signOld(data, privatekey): # Return sign to data using private key (backwar def verify(data, address, sign): # Verify data using address and sign + if not sign: + return False + if hasattr(sign, "endswith"): if opensslVerify: # Use the faster method if avalible pub = opensslVerify.getMessagePubkey(data, sign) diff --git a/src/Crypt/CryptConnection.py b/src/Crypt/CryptConnection.py index 61d96accb..0897d2aff 100644 --- a/src/Crypt/CryptConnection.py +++ b/src/Crypt/CryptConnection.py @@ -3,6 +3,7 @@ import os import ssl import hashlib +import random from Config import config from util import SslPatch @@ -20,6 +21,12 
@@ def __init__(self): self.crypt_supported = [] # Supported cryptos + self.cacert_pem = config.data_dir + "/cacert-rsa.pem" + self.cakey_pem = config.data_dir + "/cakey-rsa.pem" + self.cert_pem = config.data_dir + "/cert-rsa.pem" + self.cert_csr = config.data_dir + "/cert-rsa.csr" + self.key_pem = config.data_dir + "/key-rsa.pem" + # Select crypt that supported by both sides # Return: Name of the crypto def selectCrypt(self, client_supported): @@ -32,12 +39,13 @@ def selectCrypt(self, client_supported): # Return: wrapped socket def wrapSocket(self, sock, crypt, server=False, cert_pin=None): if crypt == "tls-rsa": - ciphers = "ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:AES128-GCM-SHA256:AES128-SHA256:HIGH:" + ciphers = "ECDHE-RSA-CHACHA20-POLY1305:ECDHE-RSA-AES128-GCM-SHA256:AES128-SHA256:AES256-SHA:" ciphers += "!aNULL:!eNULL:!EXPORT:!DSS:!DES:!RC4:!3DES:!MD5:!PSK" if server: sock_wrapped = ssl.wrap_socket( - sock, server_side=server, keyfile='%s/key-rsa.pem' % config.data_dir, - certfile='%s/cert-rsa.pem' % config.data_dir, ciphers=ciphers) + sock, server_side=server, keyfile=self.key_pem, + certfile=self.cert_pem, ciphers=ciphers + ) else: sock_wrapped = ssl.wrap_socket(sock, ciphers=ciphers) if cert_pin: @@ -50,7 +58,7 @@ def wrapSocket(self, sock, crypt, server=False, cert_pin=None): def removeCerts(self): if config.keep_ssl_cert: return False - for file_name in ["cert-rsa.pem", "key-rsa.pem"]: + for file_name in ["cert-rsa.pem", "key-rsa.pem", "cacert-rsa.pem", "cakey-rsa.pem", "cacert-rsa.srl", "cert-rsa.csr"]: file_path = "%s/%s" % (config.data_dir, file_name) if os.path.isfile(file_path): os.unlink(file_path) @@ -60,68 +68,90 @@ def loadCerts(self): if config.disable_encryption: return False - if self.createSslRsaCert(): + if self.createSslRsaCert() and "tls-rsa" not in self.crypt_supported: self.crypt_supported.append("tls-rsa") # Try to create RSA server cert + sign for connection encryption # Return: True on success def 
createSslRsaCert(self): - if os.path.isfile("%s/cert-rsa.pem" % config.data_dir) and os.path.isfile("%s/key-rsa.pem" % config.data_dir): + casubjects = [ + "/C=US/O=Amazon/OU=Server CA 1B/CN=Amazon", + "/C=US/O=Let's Encrypt/CN=Let's Encrypt Authority X3", + "/C=US/O=DigiCert Inc/OU=www.digicert.com/CN=DigiCert SHA2 High Assurance Server CA", + "/C=GB/ST=Greater Manchester/L=Salford/O=COMODO CA Limited/CN=COMODO RSA Domain Validation Secure Server CA" + ] + fakedomains = [ + "yahoo.com", "amazon.com", "live.com", "microsoft.com", "mail.ru", "csdn.net", "bing.com", + "amazon.co.jp", "office.com", "imdb.com", "msn.com", "samsung.com", "huawei.com", "ztedevices.com", + "godaddy.com", "w3.org", "gravatar.com", "creativecommons.org", "hatena.ne.jp", + "adobe.com", "opera.com", "apache.org", "rambler.ru", "one.com", "nationalgeographic.com", + "networksolutions.com", "php.net", "python.org", "phoca.cz", "debian.org", "ubuntu.com", + "nazwa.pl", "symantec.com" + ] + self.openssl_env['CN'] = random.choice(fakedomains) + + if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem): return True # Files already exits import subprocess + # Generate CAcert and CAkey + cmd = "%s req -new -newkey rsa:2048 -days 3650 -nodes -x509 -subj %s -keyout %s -out %s -batch -config %s" % helper.shellquote( + self.openssl_bin, + random.choice(casubjects), + self.cakey_pem, + self.cacert_pem, + self.openssl_env["OPENSSL_CONF"], + ) proc = subprocess.Popen( - "%s req -x509 -newkey rsa:2048 -sha256 -batch -keyout %s -out %s -nodes -config %s" % helper.shellquote( - self.openssl_bin, - config.data_dir+"/key-rsa.pem", - config.data_dir+"/cert-rsa.pem", - self.openssl_env["OPENSSL_CONF"] - ), + cmd.encode(sys.getfilesystemencoding()), shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env ) back = proc.stdout.read().strip() proc.wait() - logging.debug("Generating RSA cert and key PEM files...%s" % back) + logging.debug("Generating RSA CAcert and CAkey PEM 
files...%s" % back) - if os.path.isfile("%s/cert-rsa.pem" % config.data_dir) and os.path.isfile("%s/key-rsa.pem" % config.data_dir): - return True - else: - logging.error("RSA ECC SSL cert generation failed, cert or key files not exist.") + if not (os.path.isfile(self.cacert_pem) and os.path.isfile(self.cakey_pem)): + logging.error("RSA ECC SSL CAcert generation failed, CAcert or CAkey files not exist.") return False - # Not used yet: Missing on some platform - """def createSslEccCert(self): - return False - import subprocess - - # Create ECC privatekey + # Generate certificate key and signing request + cmd = "%s req -new -newkey rsa:2048 -keyout %s -out %s -subj %s -sha256 -nodes -batch -config %s" % helper.shellquote( + self.openssl_bin, + self.key_pem, + self.cert_csr, + "/CN=" + self.openssl_env['CN'], + self.openssl_env["OPENSSL_CONF"], + ) proc = subprocess.Popen( - "%s ecparam -name prime256v1 -genkey -out %s/key-ecc.pem" % (self.openssl_bin, config.data_dir), + cmd.encode(sys.getfilesystemencoding()), shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env ) back = proc.stdout.read().strip() proc.wait() - self.log.debug("Generating ECC privatekey PEM file...%s" % back) - - # Create ECC cert + logging.debug("Generating certificate key and signing request...%s" % back) + + # Sign request and generate certificate + cmd = "%s x509 -req -in %s -CA %s -CAkey %s -CAcreateserial -out %s -days 730 -sha256 -extensions x509_ext -extfile %s" % helper.shellquote( + self.openssl_bin, + self.cert_csr, + self.cacert_pem, + self.cakey_pem, + self.cert_pem, + self.openssl_env["OPENSSL_CONF"], + ) proc = subprocess.Popen( - "%s req -new -key %s -x509 -nodes -out %s -config %s" % helper.shellquote( - self.openssl_bin, - config.data_dir+"/key-ecc.pem", - config.data_dir+"/cert-ecc.pem", - self.openssl_env["OPENSSL_CONF"] - ), + cmd.encode(sys.getfilesystemencoding()), shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=self.openssl_env ) 
back = proc.stdout.read().strip() proc.wait() - self.log.debug("Generating ECC cert PEM file...%s" % back) + logging.debug("Generating RSA cert...%s" % back) - if os.path.isfile("%s/cert-ecc.pem" % config.data_dir) and os.path.isfile("%s/key-ecc.pem" % config.data_dir): + if os.path.isfile(self.cert_pem) and os.path.isfile(self.key_pem): return True else: - self.logging.error("ECC SSL cert generation failed, cert or key files not exits.") + logging.error("RSA ECC SSL cert generation failed, cert or key files not exist.") return False - """ + manager = CryptConnectionManager() diff --git a/src/Crypt/CryptHash.py b/src/Crypt/CryptHash.py index fb0c2dab2..118053b63 100644 --- a/src/Crypt/CryptHash.py +++ b/src/Crypt/CryptHash.py @@ -12,13 +12,19 @@ def sha1sum(file, blocksize=65536): return hash.hexdigest() -def sha512sum(file, blocksize=65536): +def sha512sum(file, blocksize=65536, format="hexdigest"): if hasattr(file, "endswith"): # Its a string open it file = open(file, "rb") hash = hashlib.sha512() for block in iter(lambda: file.read(blocksize), ""): hash.update(block) - return hash.hexdigest()[0:64] # Truncate to 256bits is good enough + + # Truncate to 256bits is good enough + if format == "hexdigest": + return hash.hexdigest()[0:64] + else: + return hash.digest()[0:32] + def sha256sum(file, blocksize=65536): @@ -38,20 +44,23 @@ def random(length=64, encoding="hex"): return hashlib.sha512(os.urandom(256)).hexdigest()[0:length] +# Sha512 truncated to 256bits +class Sha512t: + def __init__(self, data): + if data: + self.sha512 = hashlib.sha512(data) + else: + self.sha512 = hashlib.sha512() + + def hexdigest(self): + return self.sha512.hexdigest()[0:64] + + def digest(self): + return self.sha512.digest()[0:32] -if __name__ == "__main__": - import cStringIO as StringIO - a = StringIO.StringIO() - a.write("hello!") - a.seek(0) - print hashlib.sha1("hello!").hexdigest() - print sha1sum(a) + def update(self, data): + return self.sha512.update(data) - import time - s = 
time.time() - print sha1sum(open("F:\\Temp\\bigfile")), - print time.time() - s - s = time.time() - print sha512sum(open("F:\\Temp\\bigfile")), - print time.time() - s +def sha512t(data=None): + return Sha512t(data) diff --git a/src/Db/Db.py b/src/Db/Db.py index 6265bfc91..186d45fef 100644 --- a/src/Db/Db.py +++ b/src/Db/Db.py @@ -6,8 +6,11 @@ import os import gevent +from Debug import Debug from DbCursor import DbCursor from Config import config +from util import SafeRe +from util import helper opened_dbs = [] @@ -18,7 +21,7 @@ def dbCleanup(): time.sleep(60 * 5) for db in opened_dbs[:]: idle = time.time() - db.last_query_time - if idle > 60 * 5: + if idle > 60 * 5 and db.close_idle: db.close() gevent.spawn(dbCleanup) @@ -26,7 +29,7 @@ def dbCleanup(): class Db(object): - def __init__(self, schema, db_path): + def __init__(self, schema, db_path, close_idle=False): self.db_path = db_path self.db_dir = os.path.dirname(db_path) + "/" self.schema = schema @@ -41,10 +44,11 @@ def __init__(self, schema, db_path): self.db_keyvalues = {} self.delayed_queue = [] self.delayed_queue_thread = None + self.close_idle = close_idle self.last_query_time = time.time() def __repr__(self): - return "" % self.db_path + return "" % (id(self), self.db_path, self.close_idle) def connect(self): if self not in opened_dbs: @@ -55,15 +59,10 @@ def connect(self): self.log.debug("Created Db path: %s" % self.db_dir) if not os.path.isfile(self.db_path): self.log.debug("Db file not exist yet: %s" % self.db_path) - self.conn = sqlite3.connect(self.db_path) + self.conn = sqlite3.connect(self.db_path, check_same_thread=False) self.conn.row_factory = sqlite3.Row self.conn.isolation_level = None self.cur = self.getCursor() - # We need more speed then security - self.cur.execute("PRAGMA journal_mode = MEMORY") - self.cur.execute("PRAGMA synchronous = OFF") - if self.foreign_keys: - self.execute("PRAGMA foreign_keys = ON") self.log.debug( "Connected to %s in %.3fs (opened: %s, sqlite version: %s)..." 
% (self.db_path, time.time() - s, len(opened_dbs), sqlite3.version) @@ -71,20 +70,18 @@ def connect(self): # Execute query using dbcursor def execute(self, query, params=None): - self.last_query_time = time.time() if not self.conn: self.connect() return self.cur.execute(query, params) def insertOrUpdate(self, *args, **kwargs): - self.last_query_time = time.time() if not self.conn: self.connect() return self.cur.insertOrUpdate(*args, **kwargs) def executeDelayed(self, *args, **kwargs): if not self.delayed_queue_thread: - self.delayed_queue_thread = gevent.spawn_later(10, self.processDelayed) + self.delayed_queue_thread = gevent.spawn_later(1, self.processDelayed) self.delayed_queue.append(("execute", (args, kwargs))) def insertOrUpdateDelayed(self, *args, **kwargs): @@ -96,7 +93,6 @@ def processDelayed(self): if not self.delayed_queue: self.log.debug("processDelayed aborted") return - self.last_query_time = time.time() if not self.conn: self.connect() @@ -127,14 +123,25 @@ def close(self): self.conn.close() self.conn = None self.cur = None - self.log.debug("%s closed in %.3fs, opened: %s" % (self.db_path, time.time() - s, opened_dbs)) + self.log.debug("%s closed in %.3fs, opened: %s" % (self.db_path, time.time() - s, len(opened_dbs))) # Gets a cursor object to database # Return: Cursor class def getCursor(self): if not self.conn: self.connect() - return DbCursor(self.conn, self) + + cur = DbCursor(self.conn, self) + if config.db_mode == "security": + cur.execute("PRAGMA journal_mode = WAL") + cur.execute("PRAGMA synchronous = NORMAL") + else: + cur.execute("PRAGMA journal_mode = MEMORY") + cur.execute("PRAGMA synchronous = OFF") + if self.foreign_keys: + cur.execute("PRAGMA foreign_keys = ON") + + return cur # Get the table version # Return: Table version or None if not exist @@ -173,42 +180,46 @@ def checkTables(self): if changed: changed_tables.append("keyvalue") - # Check json table - if self.schema["version"] == 1: - changed = cur.needTable("json", [ - 
["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], - ["path", "VARCHAR(255)"] - ], [ - "CREATE UNIQUE INDEX path ON json(path)" - ], version=self.schema["version"]) - elif self.schema["version"] == 2: - changed = cur.needTable("json", [ - ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], - ["directory", "VARCHAR(255)"], - ["file_name", "VARCHAR(255)"] - ], [ - "CREATE UNIQUE INDEX path ON json(directory, file_name)" - ], version=self.schema["version"]) - elif self.schema["version"] == 3: - changed = cur.needTable("json", [ - ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], - ["site", "VARCHAR(255)"], - ["directory", "VARCHAR(255)"], - ["file_name", "VARCHAR(255)"] - ], [ - "CREATE UNIQUE INDEX path ON json(directory, site, file_name)" - ], version=self.schema["version"]) - if changed: - changed_tables.append("json") + # Create json table if no custom one defined + if "json" not in self.schema.get("tables", {}): + if self.schema["version"] == 1: + changed = cur.needTable("json", [ + ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], + ["path", "VARCHAR(255)"] + ], [ + "CREATE UNIQUE INDEX path ON json(path)" + ], version=self.schema["version"]) + elif self.schema["version"] == 2: + changed = cur.needTable("json", [ + ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], + ["directory", "VARCHAR(255)"], + ["file_name", "VARCHAR(255)"] + ], [ + "CREATE UNIQUE INDEX path ON json(directory, file_name)" + ], version=self.schema["version"]) + elif self.schema["version"] == 3: + changed = cur.needTable("json", [ + ["json_id", "INTEGER PRIMARY KEY AUTOINCREMENT"], + ["site", "VARCHAR(255)"], + ["directory", "VARCHAR(255)"], + ["file_name", "VARCHAR(255)"] + ], [ + "CREATE UNIQUE INDEX path ON json(directory, site, file_name)" + ], version=self.schema["version"]) + if changed: + changed_tables.append("json") # Check schema tables - for table_name, table_settings in self.schema["tables"].items(): - changed = cur.needTable( - table_name, table_settings["cols"], - 
table_settings["indexes"], version=table_settings["schema_changed"] - ) - if changed: - changed_tables.append(table_name) + for table_name, table_settings in self.schema.get("tables", {}).items(): + try: + changed = cur.needTable( + table_name, table_settings["cols"], + table_settings.get("indexes", []), version=table_settings.get("schema_changed", 0) + ) + if changed: + changed_tables.append(table_name) + except Exception as err: + self.log.error("Error creating table %s: %s" % (table_name, Debug.formatException(err))) cur.execute("COMMIT") self.log.debug("Db check done in %.3fs, changed tables: %s" % (time.time() - s, changed_tables)) @@ -217,17 +228,21 @@ def checkTables(self): return changed_tables - # Load json file to db + # Update json file to db # Return: True if matched - def loadJson(self, file_path, file=None, cur=None): + def updateJson(self, file_path, file=None, cur=None): if not file_path.startswith(self.db_dir): return False # Not from the db dir: Skipping - relative_path = re.sub("^%s" % self.db_dir, "", file_path) # File path realative to db file + relative_path = file_path[len(self.db_dir):] # File path realative to db file + # Check if filename matches any of mappings in schema matched_maps = [] for match, map_settings in self.schema["maps"].items(): - if re.match(match, relative_path): - matched_maps.append(map_settings) + try: + if SafeRe.match(match, relative_path): + matched_maps.append(map_settings) + except SafeRe.UnsafePatternError as err: + self.log.error(err) # No match found for the file if not matched_maps: @@ -236,12 +251,15 @@ def loadJson(self, file_path, file=None, cur=None): # Load the json file try: if file is None: # Open file is not file object passed - file = open(file_path) + file = open(file_path, "rb") if file is False: # File deleted data = {} else: - data = json.load(file) + if file_path.endswith("json.gz"): + data = json.load(helper.limitedGzipFile(fileobj=file)) + else: + data = json.load(file) except Exception, err: 
self.log.debug("Json file %s load error: %s" % (file_path, err)) data = {} @@ -256,7 +274,7 @@ def loadJson(self, file_path, file=None, cur=None): commit_after_done = False # Row for current json file if required - if filter(lambda dbmap: "to_keyvalue" in dbmap or "to_table" in dbmap, matched_maps): + if not data or filter(lambda dbmap: "to_keyvalue" in dbmap or "to_table" in dbmap, matched_maps): json_row = cur.getJsonRow(relative_path) # Check matched mappings in schema @@ -330,7 +348,7 @@ def loadJson(self, file_path, file=None, cur=None): {key_col: key, val_col: val, "json_id": json_row["json_id"]} ) else: # Multi value - if isinstance(val, dict): # Single row + if type(val) is dict: # Single row row = val if import_cols: row = {key: row[key] for key in row if key in import_cols} # Filter row by import_cols @@ -344,7 +362,7 @@ def loadJson(self, file_path, file=None, cur=None): row["json_id"] = json_row["json_id"] cur.execute("INSERT OR REPLACE INTO %s ?" % table_name, row) - else: # Multi row + elif type(val) is list: # Multi row for row in val: row[key_col] = key row["json_id"] = json_row["json_id"] @@ -378,10 +396,10 @@ def loadJson(self, file_path, file=None, cur=None): cur = dbjson.getCursor() cur.execute("BEGIN") cur.logging = False - dbjson.loadJson("data/users/content.json", cur=cur) + dbjson.updateJson("data/users/content.json", cur=cur) for user_dir in os.listdir("data/users"): if os.path.isdir("data/users/%s" % user_dir): - dbjson.loadJson("data/users/%s/data.json" % user_dir, cur=cur) + dbjson.updateJson("data/users/%s/data.json" % user_dir, cur=cur) # print ".", cur.logging = True cur.execute("COMMIT") diff --git a/src/Db/DbCursor.py b/src/Db/DbCursor.py index 728335d91..f397ff0cb 100644 --- a/src/Db/DbCursor.py +++ b/src/Db/DbCursor.py @@ -1,5 +1,7 @@ import time import re +from util import helper + # Special sqlite cursor @@ -13,6 +15,7 @@ def __init__(self, conn, db): self.logging = False def execute(self, query, params=None): + 
self.db.last_query_time = time.time() if isinstance(params, dict) and "?" in query: # Make easier select and insert by allowing dict params if query.startswith("SELECT") or query.startswith("DELETE") or query.startswith("UPDATE"): # Convert param dict to SELECT * FROM table WHERE key = ? AND key2 = ? format @@ -21,18 +24,34 @@ def execute(self, query, params=None): for key, value in params.items(): if type(value) is list: if key.startswith("not__"): - query_wheres.append(key.replace("not__", "") + " NOT IN (" + ",".join(["?"] * len(value)) + ")") + field = key.replace("not__", "") + operator = "NOT IN" + else: + field = key + operator = "IN" + if len(value) > 100: + # Embed values in query to avoid "too many SQL variables" error + query_values = ",".join(map(helper.sqlquote, value)) else: - query_wheres.append(key + " IN (" + ",".join(["?"] * len(value)) + ")") - values += value + query_values = ",".join(["?"] * len(value)) + values += value + query_wheres.append("%s %s (%s)" % + (field, operator, query_values) + ) else: if key.startswith("not__"): query_wheres.append(key.replace("not__", "") + " != ?") + elif key.endswith(">"): + query_wheres.append(key.replace(">", "") + " > ?") + elif key.endswith("<"): + query_wheres.append(key.replace("<", "") + " < ?") else: query_wheres.append(key + " = ?") values.append(value) wheres = " AND ".join(query_wheres) - query = re.sub("(.*)[?]", "\\1%s" % wheres, query) # Replace the last ? + if wheres == "": + wheres = "1" + query = re.sub("(.*)[?]", "\\1 %s" % wheres, query) # Replace the last ? params = values else: # Convert param dict to INSERT INTO table (key, key2) VALUES (?, ?) format @@ -41,9 +60,23 @@ def execute(self, query, params=None): keysvalues = "(%s) VALUES (%s)" % (keys, values) query = re.sub("(.*)[?]", "\\1%s" % keysvalues, query) # Replace the last ? 
params = tuple(params.values()) + elif isinstance(params, dict) and ":" in query: + new_params = dict() + values = [] + for key, value in params.items(): + if type(value) is list: + for idx, val in enumerate(value): + new_params[key + "__" + str(idx)] = val + + new_names = [":" + key + "__" + str(idx) for idx in range(len(value))] + query = re.sub(r":" + re.escape(key) + r"([)\s]|$)", "(%s)%s" % (", ".join(new_names), r"\1"), query) + else: + new_params[key] = value + + params = new_params + s = time.time() - # if query == "COMMIT": self.logging = True # Turn logging back on transaction commit if params: # Query has parameters res = self.cursor.execute(query, params) @@ -61,7 +94,6 @@ def execute(self, query, params=None): self.db.query_stats[query]["call"] += 1 self.db.query_stats[query]["time"] += time.time() - s - # if query == "BEGIN": self.logging = False # Turn logging off on transaction commit return res # Creates on updates a database row without incrementing the rowid @@ -94,8 +126,10 @@ def createTable(self, table, cols): # Create indexes on table # Return: True on success def createIndexes(self, table, indexes): - # indexes.append("CREATE INDEX %s_id ON %s(%s_id)" % (table, table, table)) # Primary key index for index in indexes: + if not index.strip().upper().startswith("CREATE"): + self.db.log.error("Index command should start with CREATE: %s" % index) + continue self.execute(index) # Create table if not exist @@ -144,6 +178,8 @@ def getJsonRow(self, file_path): self.execute("INSERT INTO json ?", {"site": site_address, "directory": directory, "file_name": file_name}) res = self.execute("SELECT * FROM json WHERE ? 
LIMIT 1", {"site": site_address, "directory": directory, "file_name": file_name}) row = res.fetchone() + else: + raise Exception("Dbschema version %s not supported" % self.db.schema.get("version")) return row def close(self): diff --git a/src/Db/DbQuery.py b/src/Db/DbQuery.py index 10ce773bd..a7730d5be 100644 --- a/src/Db/DbQuery.py +++ b/src/Db/DbQuery.py @@ -22,6 +22,8 @@ def parseFields(self, query_select): def parseWheres(self, query_where): if " AND " in query_where: return query_where.split(" AND ") + elif query_where: + return [query_where] else: return [] diff --git a/src/Debug/Debug.py b/src/Debug/Debug.py index 3d6b63947..960d260cd 100644 --- a/src/Debug/Debug.py +++ b/src/Debug/Debug.py @@ -46,20 +46,19 @@ def formatStack(): # Test if gevent eventloop blocks -if config.debug_gevent: - import logging - import gevent - import time - - def testBlock(): - logging.debug("Gevent block checker started") +import logging +import gevent +import time + +def testBlock(): + logging.debug("Gevent block checker started") + last_time = time.time() + while 1: + time.sleep(1) + if time.time() - last_time > 1.1: + logging.debug("Gevent block detected: %s" % (time.time() - last_time - 1)) last_time = time.time() - while 1: - time.sleep(1) - if time.time() - last_time > 1.1: - logging.debug("Gevent block detected: %s" % (time.time() - last_time - 1)) - last_time = time.time() - gevent.spawn(testBlock) +gevent.spawn(testBlock) if __name__ == "__main__": diff --git a/src/Debug/DebugHook.py b/src/Debug/DebugHook.py index a95d9719b..1e96125dc 100644 --- a/src/Debug/DebugHook.py +++ b/src/Debug/DebugHook.py @@ -1,5 +1,6 @@ import sys import logging +import signal import gevent import gevent.hub @@ -8,19 +9,22 @@ last_error = None -def shutdown(): - print "Shutting down..." 
- try: - if "file_server" in dir(sys.modules["main"]): - gevent.spawn(sys.modules["main"].file_server.stop) - if "ui_server" in dir(sys.modules["main"]): - gevent.spawn(sys.modules["main"].ui_server.stop) - except Exception, err: - print "Proper shutdown error: %s" % err +def shutdown(reason="Unknown"): + logging.info("Shutting down (reason: %s)..." % reason) + if "file_server" in dir(sys.modules["main"]) and sys.modules["main"].file_server.running: + try: + if "file_server" in dir(sys.modules["main"]): + gevent.spawn(sys.modules["main"].file_server.stop) + if "ui_server" in dir(sys.modules["main"]): + gevent.spawn(sys.modules["main"].ui_server.stop) + except Exception as err: + print "Proper shutdown error: %s" % err + sys.exit(0) + else: sys.exit(0) # Store last error, ignore notify, allow manual error logging -def handleError(*args): +def handleError(*args, **kwargs): global last_error if not args: # Manual called args = sys.exc_info() @@ -29,21 +33,23 @@ def handleError(*args): silent = False if args[0].__name__ != "Notify": last_error = args + if args[0].__name__ == "KeyboardInterrupt": - shutdown() - return - if not silent and args[0].__name__ != "Notify": + shutdown("Keyboard interrupt") + elif not silent and args[0].__name__ != "Notify": logging.exception("Unhandled exception") - sys.__excepthook__(*args) + if "greenlet.py" not in args[2].tb_frame.f_code.co_filename: # Don't display error twice + sys.__excepthook__(*args, **kwargs) # Ignore notify errors -def handleErrorNotify(*args): - if args[0].__name__ == "KeyboardInterrupt": - shutdown() - if args[0].__name__ != "Notify": - logging.exception("Unhandled exception") - sys.__excepthook__(*args) +def handleErrorNotify(*args, **kwargs): + err = args[0] + if err.__name__ == "KeyboardInterrupt": + shutdown("Keyboard interrupt") + elif err.__name__ != "Notify": + logging.error("Unhandled exception: %s" % [args]) + sys.__excepthook__(*args, **kwargs) if config.debug: # Keep last error for /Debug @@ -75,6 +81,12 
@@ def handleGreenletError(self, context, type, value, tb): gevent.hub.Hub.handle_error = handleGreenletError +try: + signal.signal(signal.SIGTERM, lambda signum, stack_frame: shutdown("SIGTERM")) +except Exception as err: + logging.debug("Error setting up SIGTERM watcher: %s" % err) + + if __name__ == "__main__": import time from gevent import monkey @@ -93,4 +105,4 @@ def sleeper(num): thread1.kill(exception=Debug.Notify("Worker stopped")) #thread2.throw(Debug.Notify("Throw")) print "killed" - gevent.joinall([thread1,thread2]) \ No newline at end of file + gevent.joinall([thread1,thread2]) diff --git a/src/Debug/DebugMedia.py b/src/Debug/DebugMedia.py index 497d06cbd..4f576860a 100644 --- a/src/Debug/DebugMedia.py +++ b/src/Debug/DebugMedia.py @@ -18,7 +18,7 @@ def sorter(f1, f2): elif f2 == "": return -1 else: - return cmp(f1, f2) + return cmp(f1.lower(), f2.lower()) for root, dirs, files in sorted(os.walk(path, topdown=False), cmp=sorter): for file in sorted(files): @@ -68,7 +68,7 @@ def merge(merged_path): if os.path.isfile(merged_path): # Find old parts to avoid unncessary recompile merged_old = open(merged_path, "rb").read().decode("utf8") old_parts = {} - for match in re.findall("(/\* ---- (.*?) ---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL): + for match in re.findall(r"(/\* ---- (.*?) 
---- \*/(.*?)(?=/\* ----|$))", merged_old, re.DOTALL): old_parts[match[1]] = match[2].strip("\n\r") # Merge files @@ -85,7 +85,7 @@ def merge(merged_path): return False # No coffeescript compiler, skip this file # Replace / with os separators and escape it - file_path_escaped = helper.shellquote(os.path.join(*file_path.split("/"))) + file_path_escaped = helper.shellquote(file_path.replace("/", os.path.sep)) if "%s" in config.coffeescript_compiler: # Replace %s with coffeescript file command = config.coffeescript_compiler % file_path_escaped diff --git a/src/Debug/DebugReloader.py b/src/Debug/DebugReloader.py index 1c643f6ec..247f44320 100644 --- a/src/Debug/DebugReloader.py +++ b/src/Debug/DebugReloader.py @@ -6,9 +6,10 @@ if config.debug: # Only load pyfilesytem if using debug mode try: - from fs.osfs import OSFS - pyfilesystem = OSFS("src") - pyfilesystem_plugins = OSFS("plugins") + import fs.watch + import fs.osfs + pyfilesystem = fs.osfs.OSFS("src") + pyfilesystem_plugins = fs.osfs.OSFS("plugins") logging.debug("Pyfilesystem detected, source code autoreload enabled") except Exception, err: pyfilesystem = False @@ -23,26 +24,30 @@ def __init__(self, callback, directory="/"): if pyfilesystem: self.directory = directory self.callback = callback - logging.debug("Adding autoreload: %s, cb: %s" % (directory, callback)) - thread = threading.Thread(target=self.addWatcher) - thread.daemon = True - thread.start() + if config.action == "main": + logging.debug("Adding autoreload: %s, cb: %s" % (directory, callback)) + thread = threading.Thread(target=self.addWatcher) + thread.daemon = True + thread.start() def addWatcher(self, recursive=True): try: time.sleep(1) # Wait for .pyc compiles - pyfilesystem.add_watcher(self.changed, path=self.directory, events=None, recursive=recursive) - pyfilesystem_plugins.add_watcher(self.changed, path=self.directory, events=None, recursive=recursive) + watch_events = [fs.watch.CREATED, fs.watch.MODIFIED] + 
pyfilesystem.add_watcher(self.changed, path=self.directory, events=watch_events, recursive=recursive) + pyfilesystem_plugins.add_watcher(self.changed, path=self.directory, events=watch_events, recursive=recursive) except Exception, err: print "File system watcher failed: %s (on linux pyinotify not gevent compatible yet :( )" % err def changed(self, evt): if ( not evt.path or "%s/" % config.data_dir in evt.path or - not evt.path.endswith("py") or - time.time() - self.last_chaged < 1 + (not evt.path.endswith("py") and not evt.path.endswith("json")) or + "Test" in evt.path or + time.time() - self.last_chaged < 5.0 ): return False # Ignore *.pyc changes and no reload within 1 sec + self.last_chaged = time.time() + logging.debug("File changed: %s, cb: %s reloading source code" % (evt.path, self.callback)) time.sleep(0.1) # Wait for lock release self.callback() - self.last_chaged = time.time() diff --git a/src/File/FileRequest.py b/src/File/FileRequest.py index c0c28e642..b3a13f7f3 100644 --- a/src/File/FileRequest.py +++ b/src/File/FileRequest.py @@ -2,7 +2,9 @@ import os import time import json +import collections import itertools +import socket # Third party modules import gevent @@ -13,10 +15,15 @@ from util import StreamingMsgpack from util import helper from Plugin import PluginManager +from contextlib import closing FILE_BUFF = 1024 * 512 +class RequestError(Exception): + pass + + # Incoming requests @PluginManager.acceptPlugins class FileRequest(object): @@ -55,29 +62,34 @@ def response(self, msg, streaming=False): def route(self, cmd, req_id, params): self.req_id = req_id # Don't allow other sites than locked - if "site" in params and self.connection.site_lock and self.connection.site_lock not in (params["site"], "global"): - self.response({"error": "Invalid site"}) - self.log.error("Site lock violation: %s != %s" % (self.connection.site_lock != params["site"])) - self.connection.badAction(5) - return False + if "site" in params and self.connection.target_onion: 
+ valid_sites = self.connection.getValidSites() + if params["site"] not in valid_sites and valid_sites != ["global"]: + self.response({"error": "Invalid site"}) + self.connection.log( + "Site lock violation: %s not in %s, target onion: %s" % + (params["site"], valid_sites, self.connection.target_onion) + ) + self.connection.badAction(5) + return False if cmd == "update": event = "%s update %s %s" % (self.connection.id, params["site"], params["inner_path"]) - if not RateLimit.isAllowed(event): # There was already an update for this file in the last 10 second - time.sleep(5) - self.response({"ok": "File update queued"}) # If called more than once within 15 sec only keep the last update RateLimit.callAsync(event, max(self.connection.bad_actions, 15), self.actionUpdate, params) else: func_name = "action" + cmd[0].upper() + cmd[1:] func = getattr(self, func_name, None) if cmd not in ["getFile", "streamFile"]: # Skip IO bound functions - s = time.time() if self.connection.cpu_time > 0.5: - self.log.debug("Delay %s %s, cpu_time used by connection: %.3fs" % (self.connection.ip, cmd, self.connection.cpu_time)) + self.log.debug( + "Delay %s %s, cpu_time used by connection: %.3fs" % + (self.connection.ip, cmd, self.connection.cpu_time) + ) time.sleep(self.connection.cpu_time) if self.connection.cpu_time > 5: - self.connection.close() + self.connection.close("Cpu time: %.3fs" % self.connection.cpu_time) + s = time.time() if func: func(params) else: @@ -85,64 +97,77 @@ def route(self, cmd, req_id, params): if cmd not in ["getFile", "streamFile"]: taken = time.time() - s - self.connection.cpu_time += taken + taken_sent = self.connection.last_sent_time - self.connection.last_send_time + self.connection.cpu_time += taken - taken_sent # Update a site file request def actionUpdate(self, params): site = self.sites.get(params["site"]) if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) + self.connection.badAction(1) + 
self.connection.badAction(5) return False - if not params["inner_path"].endswith("content.json"): + inner_path = params.get("inner_path", "") + + if not inner_path.endswith("content.json"): self.response({"error": "Only content.json update allowed"}) + self.connection.badAction(5) return - content = json.loads(params["body"]) + try: + content = json.loads(params["body"]) + except Exception, err: + self.log.debug("Update for %s is invalid JSON: %s" % (inner_path, err)) + self.response({"error": "File invalid JSON"}) + self.connection.badAction(5) + return - file_uri = "%s/%s:%s" % (site.address, params["inner_path"], content["modified"]) + file_uri = "%s/%s:%s" % (site.address, inner_path, content["modified"]) if self.server.files_parsing.get(file_uri): # Check if we already working on it valid = None # Same file else: - valid = site.content_manager.verifyFile(params["inner_path"], content) + try: + valid = site.content_manager.verifyFile(inner_path, content) + except Exception, err: + self.log.debug("Update for %s is invalid: %s" % (inner_path, err)) + valid = False if valid is True: # Valid and changed - self.log.info("Update for %s/%s looks valid, saving..." % (params["site"], params["inner_path"])) + site.log.info("Update for %s looks valid, saving..." 
% inner_path) self.server.files_parsing[file_uri] = True - site.storage.write(params["inner_path"], params["body"]) + site.storage.write(inner_path, params["body"]) del params["body"] - site.onFileDone(params["inner_path"]) # Trigger filedone + site.onFileDone(inner_path) # Trigger filedone - if params["inner_path"].endswith("content.json"): # Download every changed file from peer - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) # Add or get peer + if inner_path.endswith("content.json"): # Download every changed file from peer + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update") # Add or get peer # On complete publish to other peers diffs = params.get("diffs", {}) - site.onComplete.once(lambda: site.publish(inner_path=params["inner_path"], diffs=diffs, limit=2), "publish_%s" % params["inner_path"]) + site.onComplete.once(lambda: site.publish(inner_path=inner_path, diffs=diffs, limit=3), "publish_%s" % inner_path) # Load new content file and download changed files in new thread def downloader(): - site.downloadContent(params["inner_path"], peer=peer, diffs=params.get("diffs", {})) + site.downloadContent(inner_path, peer=peer, diffs=params.get("diffs", {})) del self.server.files_parsing[file_uri] gevent.spawn(downloader) else: del self.server.files_parsing[file_uri] - self.response({"ok": "Thanks, file %s updated!" % params["inner_path"]}) + self.response({"ok": "Thanks, file %s updated!" 
% inner_path}) self.connection.goodAction() elif valid is None: # Not changed - if params.get("peer"): - peer = site.addPeer(*params["peer"], return_peer=True) # Add or get peer - else: - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) # Add or get peer + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="update old") # Add or get peer if peer: if not peer.connection: peer.connect(self.connection) # Assign current connection to peer - if params["inner_path"] in site.content_manager.contents: - peer.last_content_json_update = site.content_manager.contents[params["inner_path"]]["modified"] + if inner_path in site.content_manager.contents: + peer.last_content_json_update = site.content_manager.contents[inner_path]["modified"] if config.verbose: self.log.debug( "Same version, adding new peer for locked files: %s, tasks: %s" % @@ -157,138 +182,138 @@ def downloader(): self.connection.badAction() else: # Invalid sign or sha hash - self.log.debug("Update for %s is invalid" % params["inner_path"]) - self.response({"error": "File invalid"}) + self.response({"error": "File invalid: %s" % err}) self.connection.badAction(5) + def isReadable(self, site, inner_path, file, pos): + return True + # Send file content request - def actionGetFile(self, params): + def handleGetFile(self, params, streaming=False): site = self.sites.get(params["site"]) if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) + self.connection.badAction(5) return False try: file_path = site.storage.getPath(params["inner_path"]) - with StreamingMsgpack.FilePart(file_path, "rb") as file: + if streaming: + file_obj = site.storage.open(params["inner_path"]) + else: + file_obj = StreamingMsgpack.FilePart(file_path, "rb") + + with file_obj as file: file.seek(params["location"]) - file.read_bytes = FILE_BUFF + read_bytes = params.get("read_bytes", FILE_BUFF) file_size = 
os.fstat(file.fileno()).st_size - assert params["location"] <= file_size, "Bad file location" - back = { - "body": file, - "size": file_size, - "location": min(file.tell() + FILE_BUFF, file_size) - } - self.response(back, streaming=True) - - bytes_sent = min(FILE_BUFF, file_size - params["location"]) # Number of bytes we going to send + if file_size > read_bytes: # Check if file is readable at current position (for big files) + if not self.isReadable(site, params["inner_path"], file, params["location"]): + raise RequestError("File not readable at position: %s" % params["location"]) + else: + if params.get("file_size") and params["file_size"] != file_size: + self.connection.badAction(2) + raise RequestError("File size does not match: %sB != %sB" % (params["file_size"], file_size)) + + if not streaming: + file.read_bytes = read_bytes + + + if params["location"] > file_size: + self.connection.badAction(5) + raise RequestError("Bad file location") + + if streaming: + back = { + "size": file_size, + "location": min(file.tell() + read_bytes, file_size), + "stream_bytes": min(read_bytes, file_size - params["location"]) + } + self.response(back) + self.sendRawfile(file, read_bytes=read_bytes) + else: + back = { + "body": file, + "size": file_size, + "location": min(file.tell() + file.read_bytes, file_size) + } + self.response(back, streaming=True) + + bytes_sent = min(read_bytes, file_size - params["location"]) # Number of bytes we going to send site.settings["bytes_sent"] = site.settings.get("bytes_sent", 0) + bytes_sent if config.debug_socket: self.log.debug("File %s at position %s sent %s bytes" % (file_path, params["location"], bytes_sent)) # Add peer to site if not added before - connected_peer = site.addPeer(self.connection.ip, self.connection.port) + connected_peer = site.addPeer(self.connection.ip, self.connection.port, source="request") if connected_peer: # Just added connected_peer.connect(self.connection) # Assign current connection to peer return {"bytes_sent": 
bytes_sent, "file_size": file_size, "location": params["location"]} + except RequestError, err: + self.log.debug("GetFile %s %s request error: %s" % (self.connection, params["inner_path"], Debug.formatException(err))) + self.response({"error": "File read error: %s" % err}) except Exception, err: - self.log.debug("GetFile read error: %s" % Debug.formatException(err)) - self.response({"error": "File read error: %s" % Debug.formatException(err)}) - return False - - # New-style file streaming out of Msgpack context - def actionStreamFile(self, params): - site = self.sites.get(params["site"]) - if not site or not site.settings["serving"]: # Site unknown or not serving - self.response({"error": "Unknown site"}) + if config.verbose: + self.log.debug("GetFile read error: %s" % Debug.formatException(err)) + self.response({"error": "File read error"}) return False - try: - if config.debug_socket: - self.log.debug("Opening file: %s" % params["inner_path"]) - with site.storage.open(params["inner_path"]) as file: - file.seek(params["location"]) - file_size = os.fstat(file.fileno()).st_size - stream_bytes = min(FILE_BUFF, file_size - params["location"]) - assert stream_bytes >= 0, "Stream bytes out of range" - - back = { - "size": file_size, - "location": min(file.tell() + FILE_BUFF, file_size), - "stream_bytes": stream_bytes - } - if config.debug_socket: - self.log.debug( - "Sending file %s from position %s to %s" % - (params["inner_path"], params["location"], back["location"]) - ) - self.response(back) - self.sendRawfile(file, read_bytes=FILE_BUFF) - - site.settings["bytes_sent"] = site.settings.get("bytes_sent", 0) + stream_bytes - if config.debug_socket: - self.log.debug("File %s at position %s sent %s bytes" % (params["inner_path"], params["location"], stream_bytes)) - - # Add peer to site if not added before - connected_peer = site.addPeer(self.connection.ip, self.connection.port) - if connected_peer: # Just added - connected_peer.connect(self.connection) # Assign current 
connection to peer - return {"bytes_sent": stream_bytes, "file_size": file_size, "location": params["location"]} + def actionGetFile(self, params): + return self.handleGetFile(params) - except Exception, err: - self.log.debug("GetFile read error: %s" % Debug.formatException(err)) - self.response({"error": "File read error: %s" % Debug.formatException(err)}) - return False + def actionStreamFile(self, params): + return self.handleGetFile(params, streaming=True) # Peer exchange request def actionPex(self, params): site = self.sites.get(params["site"]) if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) + self.connection.badAction(5) return False got_peer_keys = [] added = 0 # Add requester peer to site - connected_peer = site.addPeer(self.connection.ip, self.connection.port) + connected_peer = site.addPeer(self.connection.ip, self.connection.port, source="request") + if connected_peer: # It was not registered before added += 1 connected_peer.connect(self.connection) # Assign current connection to peer # Add sent peers to site - for packed_address in params.get("peers", []): + for packed_address in itertools.chain(params.get("peers", []), params.get("peers_ipv6", [])): address = helper.unpackAddress(packed_address) got_peer_keys.append("%s:%s" % address) - if site.addPeer(*address): + if site.addPeer(*address, source="pex"): added += 1 - # Add sent peers to site + # Add sent onion peers to site for packed_address in params.get("peers_onion", []): address = helper.unpackOnionAddress(packed_address) got_peer_keys.append("%s:%s" % address) - if site.addPeer(*address): + if site.addPeer(*address, source="pex"): added += 1 # Send back peers that is not in the sent list and connectable (not port 0) - packed_peers = helper.packPeers(site.getConnectablePeers(params["need"], got_peer_keys)) + packed_peers = helper.packPeers(site.getConnectablePeers(params["need"], ignore=got_peer_keys, allow_private=False)) if 
added: site.worker_manager.onPeers() if config.verbose: self.log.debug( "Added %s peers to %s using pex, sending back %s" % - (added, site, len(packed_peers["ip4"]) + len(packed_peers["onion"])) + (added, site, {key: len(val) for key, val in packed_peers.iteritems()}) ) - back = {} - if packed_peers["ip4"]: - back["peers"] = packed_peers["ip4"] - if packed_peers["onion"]: - back["peers_onion"] = packed_peers["onion"] + back = { + "peers": packed_peers["ipv4"], + "peers_ipv6": packed_peers["ipv6"], + "peers_onion": packed_peers["onion"] + } self.response(back) @@ -297,11 +322,12 @@ def actionListModified(self, params): site = self.sites.get(params["site"]) if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) + self.connection.badAction(5) return False modified_files = site.content_manager.listModified(params["since"]) # Add peer to site if not added before - connected_peer = site.addPeer(self.connection.ip, self.connection.port) + connected_peer = site.addPeer(self.connection.ip, self.connection.port, source="request") if connected_peer: # Just added connected_peer.connect(self.connection) # Assign current connection to peer @@ -311,10 +337,11 @@ def actionGetHashfield(self, params): site = self.sites.get(params["site"]) if not site or not site.settings["serving"]: # Site unknown or not serving self.response({"error": "Unknown site"}) + self.connection.badAction(5) return False # Add peer to site if not added before - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True) + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, source="request") if not peer.connection: # Just added peer.connect(self.connection) # Assign current connection to peer @@ -323,22 +350,15 @@ def actionGetHashfield(self, params): self.response({"hashfield_raw": site.content_manager.hashfield.tostring()}) def findHashIds(self, site, hash_ids, limit=100): - back_ip4 = {} - 
back_onion = {} + back = collections.defaultdict(lambda: collections.defaultdict(list)) found = site.worker_manager.findOptionalHashIds(hash_ids, limit=limit) for hash_id, peers in found.iteritems(): - back_onion[hash_id] = list(itertools.islice(( - helper.packOnionAddress(peer.ip, peer.port) - for peer in peers - if peer.ip.endswith("onion") - ), 50)) - back_ip4[hash_id] = list(itertools.islice(( - helper.packAddress(peer.ip, peer.port) - for peer in peers - if not peer.ip.endswith("onion") - ), 50)) - return back_ip4, back_onion + for peer in peers: + ip_type = helper.getIpType(peer.ip) + if len(back[ip_type][hash_id]) < 20: + back[ip_type][hash_id].append(peer.packMyAddress()) + return back def actionFindHashIds(self, params): site = self.sites.get(params["site"]) @@ -351,35 +371,23 @@ def actionFindHashIds(self, params): event_key = "%s_findHashIds_%s_%s" % (self.connection.ip, params["site"], len(params["hash_ids"])) if self.connection.cpu_time > 0.5 or not RateLimit.isAllowed(event_key, 60 * 5): time.sleep(0.1) - back_ip4, back_onion = self.findHashIds(site, params["hash_ids"], limit=10) + back = self.findHashIds(site, params["hash_ids"], limit=10) else: - back_ip4, back_onion = self.findHashIds(site, params["hash_ids"]) + back = self.findHashIds(site, params["hash_ids"]) RateLimit.called(event_key) - # Check my hashfield - if self.server.tor_manager and self.server.tor_manager.site_onions.get(site.address): # Running onion - my_ip = helper.packOnionAddress(self.server.tor_manager.site_onions[site.address], self.server.port) - my_back = back_onion - elif config.ip_external: # External ip defined - my_ip = helper.packAddress(config.ip_external, self.server.port) - my_back = back_ip4 - else: # No external ip defined - my_ip = my_ip = helper.packAddress(self.server.ip, self.server.port) - my_back = back_ip4 - + my_hashes = [] my_hashfield_set = set(site.content_manager.hashfield) for hash_id in params["hash_ids"]: if hash_id in my_hashfield_set: - if hash_id not 
in my_back: - my_back[hash_id] = [] - my_back[hash_id].append(my_ip) # Add myself + my_hashes.append(hash_id) if config.verbose: self.log.debug( - "Found: IP4: %s, Onion: %s for %s hashids in %.3fs" % - (len(back_ip4), len(back_onion), len(params["hash_ids"]), time.time() - s) + "Found: %s for %s hashids in %.3fs" % + ({key: len(val) for key, val in back.iteritems()}, len(params["hash_ids"]), time.time() - s) ) - self.response({"peers": back_ip4, "peers_onion": back_onion}) + self.response({"peers": back["ipv4"], "peers_onion": back["onion"], "peers_ipv6": back["ipv6"], "my": my_hashes}) def actionSetHashfield(self, params): site = self.sites.get(params["site"]) @@ -389,36 +397,30 @@ def actionSetHashfield(self, params): return False # Add or get peer - peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, connection=self.connection) + peer = site.addPeer(self.connection.ip, self.connection.port, return_peer=True, connection=self.connection, source="request") if not peer.connection: peer.connect(self.connection) peer.hashfield.replaceFromString(params["hashfield_raw"]) self.response({"ok": "Updated"}) - def actionSiteReload(self, params): - if self.connection.ip != "127.0.0.1" and self.connection.ip != config.ip_external: - self.response({"error": "Only local host allowed"}) - - site = self.sites.get(params["site"]) - site.content_manager.loadContent(params["inner_path"], add_bad_files=False) - site.storage.verifyFiles(quick_check=True) - site.updateWebsocket() - - self.response({"ok": "Reloaded"}) - - def actionSitePublish(self, params): - if self.connection.ip != "127.0.0.1" and self.connection.ip != config.ip_external: - self.response({"error": "Only local host allowed"}) - - site = self.sites.get(params["site"]) - num = site.publish(limit=8, inner_path=params.get("inner_path", "content.json"), diffs=params.get("diffs", {})) - - self.response({"ok": "Successfuly published to %s peers" % num}) - # Send a simple Pong! 
answer def actionPing(self, params): self.response("Pong!") + # Check requested port of the other peer + def actionCheckport(self, params): + if helper.getIpType(self.connection.ip) == "ipv6": + sock_address = (self.connection.ip, params["port"], 0, 0) + else: + sock_address = (self.connection.ip, params["port"]) + + with closing(helper.createSocket(self.connection.ip)) as sock: + sock.settimeout(5) + if sock.connect_ex(sock_address) == 0: + self.response({"status": "open", "ip_external": self.connection.ip}) + else: + self.response({"status": "closed", "ip_external": self.connection.ip}) + # Unknown command def actionUnknown(self, cmd, params): self.response({"error": "Unknown command: %s" % cmd}) diff --git a/src/File/FileServer.py b/src/File/FileServer.py index 948f799ed..0e167a6f0 100644 --- a/src/File/FileServer.py +++ b/src/File/FileServer.py @@ -1,33 +1,127 @@ import logging -import urllib2 -import re import time +import random import socket import gevent +import gevent.pool +from gevent.server import StreamServer import util +from util import helper from Config import config from FileRequest import FileRequest +from Peer import PeerPortchecker from Site import SiteManager -from Debug import Debug from Connection import ConnectionServer -from util import UpnpPunch +from Plugin import PluginManager +from Debug import Debug +@PluginManager.acceptPlugins class FileServer(ConnectionServer): - def __init__(self, ip=config.fileserver_ip, port=config.fileserver_port): - ConnectionServer.__init__(self, ip, port, self.handleRequest) - if config.ip_external: # Ip external defined in arguments - self.port_opened = True - SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist + def __init__(self, ip=config.fileserver_ip, port=config.fileserver_port, ip_type=config.fileserver_ip_type): + self.site_manager = SiteManager.site_manager + self.portchecker = PeerPortchecker.PeerPortchecker(self) + self.log = 
logging.getLogger("FileServer") + self.ip_type = ip_type + self.ip_external_list = [] + + self.supported_ip_types = ["ipv4"] # Outgoing ip_type support + if helper.getIpType(ip) == "ipv6" or self.isIpv6Supported(): + self.supported_ip_types.append("ipv6") + + if ip_type == "ipv6" or (ip_type == "dual" and "ipv6" in self.supported_ip_types): + ip = ip.replace("*", "::") else: - self.port_opened = None # Is file server opened on router - self.upnp_port_opened = False + ip = ip.replace("*", "0.0.0.0") + + if config.tor == "always": + port = config.tor_hs_port + config.fileserver_port = port + elif port == 0: # Use random port + port_range_from, port_range_to = map(int, config.fileserver_port_range.split("-")) + port = self.getRandomPort(ip, port_range_from, port_range_to) + config.fileserver_port = port + if not port: + raise Exception("Can't find bindable port") + if not config.tor == "always": + config.saveValue("fileserver_port", port) # Save random port value for next restart + + ConnectionServer.__init__(self, ip, port, self.handleRequest) + self.log.debug("Supported IP types: %s" % self.supported_ip_types) + + if ip_type == "dual" and ip == "::": + # Also bind to ipv4 addres in dual mode + try: + self.log.debug("Binding proxy to %s:%s" % ("::", self.port)) + self.stream_server_proxy = StreamServer( + ("0.0.0.0", self.port), self.handleIncomingConnection, spawn=self.pool, backlog=100 + ) + except Exception, err: + self.log.info("StreamServer proxy create error: %s" % Debug.formatException(err)) + + self.port_opened = {} + self.sites = {} self.last_request = time.time() self.files_parsing = {} + self.ui_server = None + + def getRandomPort(self, ip, port_range_from, port_range_to): + self.log.info("Getting random port in range %s-%s..." 
% (port_range_from, port_range_to)) + tried = [] + for bind_retry in range(100): + port = random.randint(port_range_from, port_range_to) + if port in tried: + continue + tried.append(port) + sock = helper.createSocket(ip) + try: + sock.bind((ip, port)) + success = True + except Exception as err: + self.log.warning("Error binding to port %s: %s" % (port, err)) + success = False + sock.close() + if success: + self.log.info("Found unused random port: %s" % port) + return port + else: + time.sleep(0.1) + return False + + def isIpv6Supported(self): + if config.tor == "always": + return True + # Test if we can connect to ipv6 address + ipv6_testip = "fcec:ae97:8902:d810:6c92:ec67:efb2:3ec5" + try: + sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM) + sock.connect((ipv6_testip, 80)) + local_ipv6 = sock.getsockname()[0] + if local_ipv6 == "::1": + self.log.debug("IPv6 not supported, no local IPv6 address") + return False + else: + self.log.debug("IPv6 supported on IP %s" % local_ipv6) + return True + except socket.error as err: + self.log.error("IPv6 not supported: %s" % err) + return False + except Exception as err: + self.log.error("IPv6 check error: %s" % err) + return False + + def listenProxy(self): + try: + self.stream_server_proxy.serve_forever() + except Exception, err: + if err.errno == 98: # Address already in use error + self.log.debug("StreamServer proxy listen error: %s" % err) + else: + self.log.info("StreamServer proxy listen error: %s" % err) # Handle request to fileserver def handleRequest(self, connection, message): @@ -41,7 +135,7 @@ def handleRequest(self, connection, message): self.log.debug("FileRequest: %s %s" % (str(connection), message["cmd"])) req = FileRequest(self, connection) req.route(message["cmd"], message.get("req_id"), message.get("params")) - if not self.has_internet: + if not self.has_internet and not connection.is_private_ip: self.has_internet = True self.onInternetOnline() @@ -55,117 +149,70 @@ def reload(self): import imp 
FileRequest = imp.load_source("FileRequest", "src/File/FileRequest.py").FileRequest - # Try to open the port using upnp - def openport(self, port=None, check=True): - if not port: - port = self.port - if self.port_opened: - return True # Port already opened - if check: # Check first if its already opened - time.sleep(1) # Wait for port open - if self.testOpenport(port, use_alternative=False)["result"] is True: - return True # Port already opened - - if config.tor == "always": # Port opening won't work in Tor mode - return False + def portCheck(self): + if config.ip_external: + for ip_external in config.ip_external: + SiteManager.peer_blacklist.append((ip_external, self.port)) # Add myself to peer blacklist + + ip_external_types = set([helper.getIpType(ip) for ip in config.ip_external]) + res = { + "ipv4": "ipv4" in ip_external_types, + "ipv6": "ipv6" in ip_external_types + } + self.ip_external_list = config.ip_external + self.port_opened.update(res) + self.log.info("Server port opened based on configuration ipv4: %s, ipv6: %s" % (res["ipv4"], res["ipv6"])) + return res + + self.port_opened = {} + if self.ui_server: + self.ui_server.updateWebsocket() + + if "ipv6" in self.supported_ip_types: + res_ipv6_thread = gevent.spawn(self.portchecker.portCheck, self.port, "ipv6") + else: + res_ipv6_thread = None - self.log.info("Trying to open port using UpnpPunch...") - try: - UpnpPunch.ask_to_open_port(self.port, 'ZeroNet', retries=3, protos=["TCP"]) - except (UpnpPunch.UpnpError, UpnpPunch.IGDError, socket.error) as err: - self.log.error("UpnpPunch run error: %s" % - Debug.formatException(err)) - return False + res_ipv4 = self.portchecker.portCheck(self.port, "ipv4") + if not res_ipv4["opened"] and config.tor != "always": + if self.portchecker.portOpen(self.port): + res_ipv4 = self.portchecker.portCheck(self.port, "ipv4") - if self.testOpenport(port)["result"] is True: - self.upnp_port_opened = True - return True + if res_ipv6_thread is None: + res_ipv6 = {"ip": None, 
"opened": None} + else: + res_ipv6 = res_ipv6_thread.get() + if res_ipv6["opened"] and not helper.getIpType(res_ipv6["ip"]) == "ipv6": + self.log.info("Invalid IPv6 address from port check: %s" % res_ipv6["ip"]) + res_ipv6["opened"] = False - self.log.info("Upnp mapping failed :( Please forward port %s on your router to your ipaddress" % port) - return False + self.ip_external_list = [] + for res_ip in [res_ipv4, res_ipv6]: + if res_ip["ip"] and res_ip["ip"] not in self.ip_external_list: + self.ip_external_list.append(res_ip["ip"]) + SiteManager.peer_blacklist.append((res_ip["ip"], self.port)) - # Test if the port is open - def testOpenport(self, port=None, use_alternative=True): - if not port: - port = self.port - back = self.testOpenportPortchecker(port) - if back["result"] is not True and use_alternative: # If no success try alternative checker - return self.testOpenportCanyouseeme(port) - else: - return back + self.log.info("Server port opened ipv4: %s, ipv6: %s" % (res_ipv4["opened"], res_ipv6["opened"])) - def testOpenportPortchecker(self, port=None): - self.log.info("Checking port %s using portchecker.co..." % port) - try: - data = urllib2.urlopen("http://portchecker.co/check", "port=%s" % port, timeout=20.0).read() - message = re.match('.*
    (.*?)
    ', data, re.DOTALL).group(1) - message = re.sub("<.*?>", "", message.replace("
    ", " ").replace(" ", " ").strip()) # Strip http tags - except Exception, err: - message = "Error: %s" % Debug.formatException(err) - data = "" - - if "closed" in message or "Error" in message: - if config.tor != "always": - self.log.info("[BAD :(] Port closed: %s" % message) - if port == self.port: - self.port_opened = False # Self port, update port_opened status - match = re.match(".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) # Try find my external ip in message - if match: # Found my ip in message - config.ip_external = match.group(1) - SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist - else: - config.ip_external = False - return {"result": False, "message": message} - else: - self.log.info("[OK :)] Port open: %s" % message) - if port == self.port: # Self port, update port_opened status - self.port_opened = True - match = re.match(".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) # Try find my external ip in message - if match: # Found my ip in message - config.ip_external = match.group(1) - SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist - else: - config.ip_external = False - return {"result": True, "message": message} - - def testOpenportCanyouseeme(self, port=None): - self.log.info("Checking port %s using canyouseeme.org..." % port) - try: - data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read() - message = re.match('.*

    (.*?)

    ', data, re.DOTALL).group(1) - message = re.sub("<.*?>", "", message.replace("
    ", " ").replace(" ", " ")) # Strip http tags - except Exception, err: - message = "Error: %s" % Debug.formatException(err) - - if "Error" in message: - if config.tor != "always": - self.log.info("[BAD :(] Port closed: %s" % message) - if port == self.port: - self.port_opened = False # Self port, update port_opened status - match = re.match(".*?([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", message) # Try find my external ip in message - if match: # Found my ip in message - config.ip_external = match.group(1) - SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist - else: - config.ip_external = False - return {"result": False, "message": message} - else: - self.log.info("[OK :)] Port open: %s" % message) - if port == self.port: # Self port, update port_opened status - self.port_opened = True - match = re.match(".*?([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", message) # Try find my external ip in message - if match: # Found my ip in message - config.ip_external = match.group(1) - SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist - else: - config.ip_external = False - return {"result": True, "message": message} - - # Set external ip without testing - def setIpExternal(self, ip_external): - logging.info("Setting external ip without testing: %s..." 
% ip_external) - config.ip_external = ip_external - self.port_opened = True + res = {"ipv4": res_ipv4["opened"], "ipv6": res_ipv6["opened"]} + + # Add external IPs from local interfaces + interface_ips = helper.getInterfaceIps("ipv4") + if "ipv6" in self.supported_ip_types: + interface_ips += helper.getInterfaceIps("ipv6") + for ip in interface_ips: + if not helper.isPrivateIp(ip) and ip not in self.ip_external_list: + self.ip_external_list.append(ip) + res[helper.getIpType(ip)] = True # We have opened port if we have external ip + SiteManager.peer_blacklist.append((ip, self.port)) + self.log.debug("External ip found on interfaces: %s" % ip) + + self.port_opened.update(res) + + if self.ui_server: + self.ui_server.updateWebsocket() + + return res # Check site file integrity def checkSite(self, site, check_files=False): @@ -174,74 +221,108 @@ def checkSite(self, site, check_files=False): site.update(check_files=check_files) # Update site's content.json and download changed files site.sendMyHashfield() site.updateHashfield() - if len(site.peers) > 5: # Keep active connections if site having 5 or more peers - site.needConnections() # Check sites integrity @util.Noparallel() def checkSites(self, check_files=False, force_port_check=False): self.log.debug("Checking sites...") + s = time.time() sites_checking = False - if self.port_opened is None or force_port_check: # Test and open port if not tested yet + if not self.port_opened or force_port_check: # Test and open port if not tested yet if len(self.sites) <= 2: # Don't wait port opening on first startup sites_checking = True for address, site in self.sites.items(): gevent.spawn(self.checkSite, site, check_files) - if force_port_check: - self.port_opened = None - self.openport() - if self.port_opened is False: + self.portCheck() + + if not self.port_opened["ipv4"]: self.tor_manager.startOnions() if not sites_checking: - for address, site in self.sites.items(): # Check sites integrity - gevent.spawn(self.checkSite, site, 
check_files) # Check in new thread - time.sleep(2) # Prevent too quick request - - def trackersFileReloader(self): - while 1: - config.loadTrackersFile() - time.sleep(60) + check_pool = gevent.pool.Pool(5) + # Check sites integrity + for site in sorted(self.sites.values(), key=lambda site: site.settings.get("modified", 0), reverse=True): + if not site.settings["serving"]: + continue + check_thread = check_pool.spawn(self.checkSite, site, check_files) # Check in new thread + time.sleep(2) + if site.settings.get("modified", 0) < time.time() - 60 * 60 * 24: # Not so active site, wait some sec to finish + check_thread.join(timeout=5) + self.log.debug("Checksites done in %.3fs" % (time.time() - s)) - # Announce sites every 20 min - def announceSites(self): + def cleanupSites(self): import gc - if config.trackers_file: - gevent.spawn(self.trackersFileReloader) + startup = True + time.sleep(5 * 60) # Sites already cleaned up on startup + peers_protected = set([]) while 1: # Sites health care every 20 min + self.log.debug( + "Running site cleanup, connections: %s, internet: %s, protected peers: %s" % + (len(self.connections), self.has_internet, len(peers_protected)) + ) + for address, site in self.sites.items(): if not site.settings["serving"]: continue + + if not startup: + site.cleanupPeers(peers_protected) + + time.sleep(1) # Prevent too quick request + + peers_protected = set([]) + for address, site in self.sites.items(): + if not site.settings["serving"]: + continue + if site.peers: - site.announcePex() + with gevent.Timeout(10, exception=False): + site.announcer.announcePex() # Retry failed files if site.bad_files: site.retryBadFiles() - site.cleanupPeers() + if time.time() - site.settings.get("modified", 0) < 60 * 60 * 24 * 7: + # Keep active connections if site has been modified witin 7 days + connected_num = site.needConnections(check_site_on_reconnect=True) - site.needConnections() # Keep 5 active peer connection to get the updates + if connected_num < 
config.connected_limit: # This site has small amount of peers, protect them from closing + peers_protected.update([peer.key for peer in site.getConnectedPeers()]) - time.sleep(2) # Prevent too quick request + time.sleep(1) # Prevent too quick request site = None gc.collect() # Implicit garbage collection + startup = False + time.sleep(60 * 20) - # Find new peers - for tracker_i in range(len(config.trackers)): - time.sleep(60 * 20 / len(config.trackers)) # Query all trackers one-by-one in 20 minutes evenly distributed - for address, site in self.sites.items(): - if not site.settings["serving"]: - continue - site.announce(mode="update", pex=False) - if site.settings["own"]: # Check connections more frequently on own sites to speed-up first connections - site.needConnections() - site.sendMyHashfield(3) - site.updateHashfield(3) - time.sleep(2) + def announceSite(self, site): + site.announce(mode="update", pex=False) + active_site = time.time() - site.settings.get("modified", 0) < 24 * 60 * 60 + if site.settings["own"] or active_site: # Check connections more frequently on own and active sites to speed-up first connections + site.needConnections(check_site_on_reconnect=True) + site.sendMyHashfield(3) + site.updateHashfield(3) + + # Announce sites every 20 min + def announceSites(self): + time.sleep(5 * 60) # Sites already announced on startup + while 1: + config.loadTrackersFile() + s = time.time() + for address, site in self.sites.items(): + if not site.settings["serving"]: + continue + gevent.spawn(self.announceSite, site).join(timeout=10) + time.sleep(1) + taken = time.time() - s + + sleep = max(0, 60 * 20 / len(config.trackers) - taken) # Query all trackers one-by-one in 20 minutes evenly distributed + self.log.debug("Site announce tracker done in %.3fs, sleeping for %.3fs..." 
% (taken, sleep)) + time.sleep(sleep) # Detects if computer back from wakeup def wakeupWatcher(self): @@ -259,9 +340,8 @@ def wakeupWatcher(self): # Bind and start serving sites def start(self, check_sites=True): - self.sites = SiteManager.site_manager.list() - self.log = logging.getLogger("FileServer") - + ConnectionServer.start(self) + self.sites = self.site_manager.list() if config.debug: # Auto reload FileRequest on change from Debug import DebugReloader @@ -271,18 +351,20 @@ def start(self, check_sites=True): gevent.spawn(self.checkSites) thread_announce_sites = gevent.spawn(self.announceSites) + thread_cleanup_sites = gevent.spawn(self.cleanupSites) thread_wakeup_watcher = gevent.spawn(self.wakeupWatcher) - ConnectionServer.start(self) + ConnectionServer.listen(self) self.log.debug("Stopped.") def stop(self): - if self.running and self.upnp_port_opened: + if self.running and self.portchecker.upnp_port_opened: self.log.debug('Closing port %d' % self.port) try: - UpnpPunch.ask_to_close_port(self.port, protos=["TCP"]) + self.portchecker.portClose(self.port) self.log.info('Closed port via upnp.') - except (UpnpPunch.UpnpError, UpnpPunch.IGDError), err: + except Exception as err: self.log.info("Failed at attempt to use upnp to close port: %s" % err) - ConnectionServer.stop(self) + + return ConnectionServer.stop(self) diff --git a/src/Peer/Peer.py b/src/Peer/Peer.py index ad5cf8dd8..536ecf410 100644 --- a/src/Peer/Peer.py +++ b/src/Peer/Peer.py @@ -1,6 +1,8 @@ import logging import time import sys +import itertools +import collections import gevent @@ -9,16 +11,18 @@ from Config import config from util import helper from PeerHashfield import PeerHashfield +from Plugin import PluginManager if config.use_tempfiles: import tempfile # Communicate remote peers +@PluginManager.acceptPlugins class Peer(object): __slots__ = ( - "ip", "port", "site", "key", "connection", "connection_server", "time_found", "time_response", "time_hashfield", "time_added", "has_hashfield", - 
"time_my_hashfield_sent", "last_ping", "last_content_json_update", "hashfield", "connection_error", "hash_failed", "download_bytes", "download_time" + "ip", "port", "site", "key", "connection", "connection_server", "time_found", "time_response", "time_hashfield", "time_added", "has_hashfield", "is_tracker_connection", + "time_my_hashfield_sent", "last_ping", "reputation", "last_content_json_update", "hashfield", "connection_error", "hash_failed", "download_bytes", "download_time" ) def __init__(self, ip, port, site=None, connection_server=None): @@ -36,6 +40,8 @@ def __init__(self, ip, port, site=None, connection_server=None): self.time_response = None # Time of last successful response from peer self.time_added = time.time() self.last_ping = None # Last response time for ping + self.is_tracker_connection = False # Tracker connection instead of normal peer + self.reputation = 0 # More likely to connect if larger self.last_content_json_update = 0.0 # Modify date of last received content.json self.connection_error = 0 # Series of connection error @@ -61,13 +67,19 @@ def log(self, text): # Connect to host def connect(self, connection=None): + if self.reputation < -10: + self.reputation = -10 + if self.reputation > 10: + self.reputation = 10 + if self.connection: self.log("Getting connection (Closing %s)..." % self.connection) - self.connection.close() + self.connection.close("Connection change") else: - self.log("Getting connection...") + self.log("Getting connection (reputation: %s)..." 
% self.reputation) if connection: # Connection specified + self.log("Assigning connection %s" % connection) self.connection = connection self.connection.sites += 1 else: # Try to find from connection pool or create new connection @@ -75,18 +87,20 @@ def connect(self, connection=None): try: if self.connection_server: - self.connection = self.connection_server.getConnection(self.ip, self.port, site=self.site) + connection_server = self.connection_server elif self.site: - self.connection = self.site.connection_server.getConnection(self.ip, self.port, site=self.site) + connection_server = self.site.connection_server else: - self.connection = sys.modules["main"].file_server.getConnection(self.ip, self.port, site=self.site) + connection_server = sys.modules["main"].file_server + self.connection = connection_server.getConnection(self.ip, self.port, site=self.site, is_tracker_connection=self.is_tracker_connection) + self.reputation += 1 self.connection.sites += 1 - except Exception, err: - self.onConnectionError() + self.onConnectionError("Getting connection error") self.log("Getting connection error: %s (connection_error: %s, hash_failed: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed)) self.connection = None + return self.connection # Check if we have connection to peer def findConnection(self): @@ -110,8 +124,16 @@ def packMyAddress(self): else: return helper.packAddress(self.ip, self.port) - # Found a peer on tracker - def found(self): + # Found a peer from a source + def found(self, source="other"): + if self.reputation < 5: + if source == "tracker": + self.reputation += 1 + elif source == "local": + self.reputation += 3 + + if source in ("tracker", "local"): + self.site.peers_recent.appendleft(self) self.time_found = time.time() # Send a command to peer and return response value @@ -119,29 +141,35 @@ def request(self, cmd, params={}, stream_to=None): if not self.connection or self.connection.closed: self.connect() if not self.connection: 
- self.onConnectionError() + self.onConnectionError("Reconnect error") return None # Connection failed - self.log("Send request: %s %s" % (params.get("site", ""), cmd)) + self.log("Send request: %s %s %s %s" % (params.get("site", ""), cmd, params.get("inner_path", ""), params.get("location", ""))) - for retry in range(1, 4): # Retry 3 times + for retry in range(1, 2): # Retry 1 times try: + if not self.connection: + raise Exception("No connection found") res = self.connection.request(cmd, params, stream_to) if not res: raise Exception("Send error") if "error" in res: self.log("%s error: %s" % (cmd, res["error"])) - self.onConnectionError() + self.onConnectionError("Response error") + break else: # Successful request, reset connection error num self.connection_error = 0 self.time_response = time.time() - return res + if res: + return res + else: + raise Exception("Invalid response: %s" % res) except Exception, err: if type(err).__name__ == "Notify": # Greenlet killed by worker self.log("Peer worker got killed: %s, aborting cmd: %s" % (err.message, cmd)) break else: - self.onConnectionError() + self.onConnectionError("Request error") self.log( "%s (connection_error: %s, hash_failed: %s, retry: %s)" % (Debug.formatException(err), self.connection_error, self.hash_failed, retry) @@ -151,62 +179,56 @@ def request(self, cmd, params={}, stream_to=None): return None # Failed after 4 retry # Get a file content from peer - def getFile(self, site, inner_path): - # Use streamFile if client supports it - if config.stream_downloads and self.connection and self.connection.handshake and self.connection.handshake["rev"] > 310: - return self.streamFile(site, inner_path) - - location = 0 - if config.use_tempfiles: - buff = tempfile.SpooledTemporaryFile(max_size=16 * 1024, mode='w+b') + def getFile(self, site, inner_path, file_size=None, pos_from=0, pos_to=None, streaming=False): + if file_size and file_size > 5 * 1024 * 1024: + max_read_size = 1024 * 1024 else: - buff = StringIO() - - 
s = time.time() - while True: # Read in 512k parts - res = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location}) + max_read_size = 512 * 1024 - if not res or "body" not in res: # Error - return False + if pos_to: + read_bytes = min(max_read_size, pos_to - pos_from) + else: + read_bytes = max_read_size - buff.write(res["body"]) - res["body"] = None # Save memory - if res["location"] == res["size"]: # End of file - break - else: - location = res["location"] + location = pos_from - self.download_bytes += res["location"] - self.download_time += (time.time() - s) - if self.site: - self.site.settings["bytes_recv"] = self.site.settings.get("bytes_recv", 0) + res["location"] - buff.seek(0) - return buff - - # Download file out of msgpack context to save memory and cpu - def streamFile(self, site, inner_path): - location = 0 if config.use_tempfiles: buff = tempfile.SpooledTemporaryFile(max_size=16 * 1024, mode='w+b') else: buff = StringIO() s = time.time() - while True: # Read in 512k parts - res = self.request("streamFile", {"site": site, "inner_path": inner_path, "location": location}, stream_to=buff) - - if not res or "location" not in res: # Error - self.log("Invalid response: %s" % res) - return False - - if res["location"] == res["size"]: # End of file + while True: # Read in smaller parts + if config.stream_downloads or read_bytes > 256 * 1024 or streaming: + res = self.request("streamFile", {"site": site, "inner_path": inner_path, "location": location, "read_bytes": read_bytes, "file_size": file_size}, stream_to=buff) + if not res or "location" not in res: # Error + return False + else: + self.log("Send: %s" % inner_path) + res = self.request("getFile", {"site": site, "inner_path": inner_path, "location": location, "read_bytes": read_bytes, "file_size": file_size}) + if not res or "location" not in res: # Error + return False + self.log("Recv: %s" % inner_path) + buff.write(res["body"]) + res["body"] = None # Save memory + + if 
res["location"] == res["size"] or res["location"] == pos_to: # End of file break else: location = res["location"] + if pos_to: + read_bytes = min(max_read_size, pos_to - location) + + if pos_to: + recv = pos_to - pos_from + else: + recv = res["location"] - self.download_bytes += res["location"] + self.download_bytes += recv self.download_time += (time.time() - s) - self.site.settings["bytes_recv"] = self.site.settings.get("bytes_recv", 0) + res["location"] + if self.site: + self.site.settings["bytes_recv"] = self.site.settings.get("bytes_recv", 0) + recv + self.log("Downloaded: %s, pos: %s, read_bytes: %s" % (inner_path, buff.tell(), read_bytes)) buff.seek(0) return buff @@ -222,7 +244,7 @@ def ping(self): response_time = time.time() - s break # All fine, exit from for loop # Timeout reached or bad response - self.onConnectionError() + self.onConnectionError("Ping timeout") self.connect() time.sleep(1) @@ -239,23 +261,37 @@ def pex(self, site=None, need_num=5): site = self.site # If no site defined request peers for this site # give back 5 connectible peers - packed_peers = helper.packPeers(self.site.getConnectablePeers(5)) - request = {"site": site.address, "peers": packed_peers["ip4"], "need": need_num} + packed_peers = helper.packPeers(self.site.getConnectablePeers(5, allow_private=False)) + request = {"site": site.address, "peers": packed_peers["ipv4"], "need": need_num} if packed_peers["onion"]: request["peers_onion"] = packed_peers["onion"] + if packed_peers["ipv6"]: + request["peers_ipv6"] = packed_peers["ipv6"] + res = self.request("pex", request) + if not res or "error" in res: return False + added = 0 - # Ip4 - for peer in res.get("peers", []): + + # Remove unsupported peer types + if "peers_ipv6" in res and "ipv6" not in self.connection.server.supported_ip_types: + del res["peers_ipv6"] + + if "peers_onion" in res and "onion" not in self.connection.server.supported_ip_types: + del res["peers_onion"] + + # Add IPv4 + IPv6 + for peer in 
itertools.chain(res.get("peers", []), res.get("peers_ipv6", [])): address = helper.unpackAddress(peer) - if site.addPeer(*address): + if site.addPeer(*address, source="pex"): added += 1 - # Onion + + # Add Onion for peer in res.get("peers_onion", []): address = helper.unpackOnionAddress(peer) - if site.addPeer(*address): + if site.addPeer(*address, source="pex"): added += 1 if added: @@ -269,13 +305,13 @@ def listModified(self, since): return self.request("listModified", {"since": since, "site": self.site.address}) def updateHashfield(self, force=False): - # Don't update hashfield again in 15 min - if self.time_hashfield and time.time() - self.time_hashfield > 60 * 15 and not force: + # Don't update hashfield again in 5 min + if self.time_hashfield and time.time() - self.time_hashfield < 5 * 60 and not force: return False self.time_hashfield = time.time() res = self.request("getHashfield", {"site": self.site.address}) - if not res or "error" in res: + if not res or "error" in res or "hashfield_raw" not in res: return False self.hashfield.replaceFromString(res["hashfield_raw"]) @@ -285,15 +321,26 @@ def updateHashfield(self, force=False): # Return: {hash1: ["ip:port", "ip:port",...],...} def findHashIds(self, hash_ids): res = self.request("findHashIds", {"site": self.site.address, "hash_ids": hash_ids}) - if not res or "error" in res: + if not res or "error" in res or type(res) is not dict: return False - # Unpack IP4 - back = {key: map(helper.unpackAddress, val) for key, val in res["peers"].items()[0:30]} - # Unpack onion - for hash, onion_peers in res.get("peers_onion", {}).items()[0:30]: - if not hash in back: - back[hash] = [] - back[hash] += map(helper.unpackOnionAddress, onion_peers) + + back = collections.defaultdict(list) + + for ip_type in ["ipv4", "ipv6", "onion"]: + if ip_type == "ipv4": + key = "peers" + else: + key = "peers_%s" % ip_type + for hash, peers in res.get(key, {}).items()[0:30]: + if ip_type == "onion": + unpacker_func = 
helper.unpackOnionAddress + else: + unpacker_func = helper.unpackAddress + + back[hash] += map(unpacker_func, peers) + + for hash in res.get("my", []): + back[hash].append((self.connection.ip, self.connection.port)) return back @@ -313,20 +360,29 @@ def sendMyHashfield(self): return True # Stop and remove from site - def remove(self): + def remove(self, reason="Removing"): self.log("Removing peer...Connection error: %s, Hash failed: %s" % (self.connection_error, self.hash_failed)) if self.site and self.key in self.site.peers: del(self.site.peers[self.key]) + + if self.site and self in self.site.peers_recent: + self.site.peers_recent.remove(self) + if self.connection: - self.connection.close() + self.connection.close(reason) # - EVENTS - # On connection error - def onConnectionError(self): + def onConnectionError(self, reason="Unknown"): self.connection_error += 1 - if self.connection_error >= 3: # Dead peer - self.remove() + if self.site and len(self.site.peers) > 200: + limit = 3 + else: + limit = 6 + self.reputation -= 1 + if self.connection_error >= limit: # Dead peer + self.remove("Peer connection: %s" % reason) # Done working with peer def onWorkerDone(self): diff --git a/src/Peer/PeerPortchecker.py b/src/Peer/PeerPortchecker.py new file mode 100644 index 000000000..5bcf91dfd --- /dev/null +++ b/src/Peer/PeerPortchecker.py @@ -0,0 +1,171 @@ +import logging +import urllib +import urllib2 +import re +import time + +from Debug import Debug +from util import UpnpPunch + + +class PeerPortchecker(object): + def __init__(self, file_server): + self.log = logging.getLogger("PeerPortchecker") + self.upnp_port_opened = False + self.file_server = file_server + + def requestUrl(self, url, post_data=None): + if type(post_data) is dict: + post_data = urllib.urlencode(post_data) + req = urllib2.Request(url, post_data) + req.add_header('Referer', url) + return urllib2.urlopen(req, timeout=20.0) + + def portOpen(self, port): + self.log.info("Trying to open port using 
UpnpPunch...") + + try: + UpnpPunch.ask_to_open_port(port, 'ZeroNet', retries=3, protos=["TCP"]) + self.upnp_port_opened = True + except Exception as err: + self.log.warning("UpnpPunch run error: %s" % Debug.formatException(err)) + return False + + return True + + def portClose(self, port): + return UpnpPunch.ask_to_close_port(port, protos=["TCP"]) + + def portCheck(self, port, ip_type="ipv4"): + if ip_type == "ipv6": + checker_functions = ["checkMyaddr", "checkIpv6scanner"] + else: + checker_functions = ["checkPortchecker", "checkCanyouseeme"] + + for func_name in checker_functions: + func = getattr(self, func_name) + s = time.time() + try: + res = func(port) + if res: + self.log.info( + "Checking port %s (%s) using %s result: %s in %.3fs" % + (port, ip_type, func_name, res, time.time() - s) + ) + time.sleep(0.1) + if res["opened"] and not self.file_server.had_external_incoming: + res["opened"] = False + self.log.warning("Port %s:%s, but no incoming connection" % (res["ip"], port)) + break + except Exception as err: + self.log.warning( + "%s check error: %s in %.3fs" % + (func_name, Debug.formatException(err), time.time() - s) + ) + res = {"ip": None, "opened": False} + + return res + + def checkCanyouseeme(self, port): + data = urllib2.urlopen("http://www.canyouseeme.org/", "port=%s" % port, timeout=20.0).read() + message = re.match('.*

    (.*?)

    ', data, re.DOTALL).group(1) + message = re.sub("<.*?>", "", message.replace("
    ", " ").replace(" ", " ")) # Strip http tags + + match = re.match(".*service on (.*?) on", message) + if match: + ip = match.group(1) + else: + raise Exception("Invalid response: %s" % message) + + if "Success" in message: + return {"ip": ip, "opened": True} + elif "Error" in message: + return {"ip": ip, "opened": False} + else: + raise Exception("Invalid response: %s" % message) + + def checkPortchecker(self, port): + data = urllib2.urlopen("https://portchecker.co/check", "port=%s" % port, timeout=20.0).read() + message = re.match('.*
    (.*?)
    ', data, re.DOTALL).group(1) + message = re.sub("<.*?>", "", message.replace("
    ", " ").replace(" ", " ").strip()) # Strip http tags + + match = re.match(".*targetIP.*?value=\"(.*?)\"", data, re.DOTALL) + if match: + ip = match.group(1) + else: + raise Exception("Invalid response: %s" % message) + + if "open" in message: + return {"ip": ip, "opened": True} + elif "closed" in message: + return {"ip": ip, "opened": False} + else: + raise Exception("Invalid response: %s" % message) + + def checkSubnetonline(self, port): + url = "https://www.subnetonline.com/pages/ipv6-network-tools/online-ipv6-port-scanner.php" + + data = self.requestUrl(url).read() + + ip = re.match('.*Your IP is.*?name="host".*?value="(.*?)"', data, re.DOTALL).group(1) + token = re.match('.*name="token".*?value="(.*?)"', data, re.DOTALL).group(1) + print ip + + post_data = {"host": ip, "port": port, "allow": "on", "token": token, "submit": "Scanning.."} + data = self.requestUrl(url, post_data).read() + + message = re.match(".*
    (.*?)
    ", data, re.DOTALL).group(1) + message = re.sub("<.*?>", "", message.replace("
    ", " ").replace(" ", " ").strip()) # Strip http tags + + if "online" in message: + return {"ip": ip, "opened": True} + elif "closed" in message: + return {"ip": ip, "opened": False} + else: + raise Exception("Invalid response: %s" % message) + + def checkMyaddr(self, port): + url = "http://ipv6.my-addr.com/online-ipv6-port-scan.php" + + data = self.requestUrl(url).read() + + ip = re.match('.*Your IP address is:[ ]*([0-9\.:a-z]+)', data.replace(" ", ""), re.DOTALL).group(1) + + post_data = {"addr": ip, "ports_selected": "", "ports_list": port} + data = self.requestUrl(url, post_data).read() + + message = re.match(".*(.*?)
    ", data, re.DOTALL).group(1) + + if "ok.png" in message: + return {"ip": ip, "opened": True} + elif "fail.png" in message: + return {"ip": ip, "opened": False} + else: + raise Exception("Invalid response: %s" % message) + + def checkIpv6scanner(self, port): + url = "http://www.ipv6scanner.com/cgi-bin/main.py" + + data = self.requestUrl(url).read() + + ip = re.match('.*Your IP address is[ ]*([0-9\.:a-z]+)', data.replace(" ", ""), re.DOTALL).group(1) + + post_data = {"host": ip, "scanType": "1", "port": port, "protocol": "tcp", "authorized": "yes"} + data = self.requestUrl(url, post_data).read() + + message = re.match(".*(.*?)
    ", data, re.DOTALL).group(1) + message_text = re.sub("<.*?>", " ", message.replace("
    ", " ").replace(" ", " ").strip()) # Strip http tags + + if "OPEN" in message_text: + return {"ip": ip, "opened": True} + elif "CLOSED" in message_text or "FILTERED" in message_text: + return {"ip": ip, "opened": False} + else: + raise Exception("Invalid response: %s" % message_text) + +if __name__ == "__main__": + import time + peer_portchecker = PeerPortchecker() + for func_name in ["checkIpv6scanner", "checkMyaddr", "checkPortchecker", "checkCanyouseeme"]: + s = time.time() + print(func_name, getattr(peer_portchecker, func_name)(3894), "%.3fs" % (time.time() - s)) diff --git a/src/Plugin/PluginManager.py b/src/Plugin/PluginManager.py index a78efb59f..c5d1f79b1 100644 --- a/src/Plugin/PluginManager.py +++ b/src/Plugin/PluginManager.py @@ -1,6 +1,8 @@ import logging import os import sys +import shutil +import time from collections import defaultdict from Debug import Debug @@ -15,18 +17,27 @@ def __init__(self): self.subclass_order = {} # Record the load order of the plugins, to keep it after reload self.pluggable = {} self.plugin_names = [] # Loaded plugin names + self.after_load = [] # Execute functions after loaded plugins - sys.path.append(self.plugin_path) + sys.path.append(os.path.join(os.getcwd(), self.plugin_path)) + self.migratePlugins() if config.debug: # Auto reload Plugins on file change from Debug import DebugReloader DebugReloader(self.reloadPlugins) + def migratePlugins(self): + for dir_name in os.listdir(self.plugin_path): + if dir_name == "Mute": + self.log.info("Deleting deprecated/renamed plugin: %s" % dir_name) + shutil.rmtree("%s/%s" % (self.plugin_path, dir_name)) + # -- Load / Unload -- # Load all plugin def loadPlugins(self): - for dir_name in os.listdir(self.plugin_path): + s = time.time() + for dir_name in sorted(os.listdir(self.plugin_path)): dir_path = os.path.join(self.plugin_path, dir_name) if dir_name.startswith("disabled"): continue # Dont load if disabled @@ -42,8 +53,13 @@ def loadPlugins(self): if dir_name not in 
self.plugin_names: self.plugin_names.append(dir_name) + self.log.debug("Plugins loaded in %.3fs" % (time.time() - s)) + for func in self.after_load: + func() + # Reload all plugins def reloadPlugins(self): + self.after_load = [] self.plugins_before = self.plugins self.plugins = defaultdict(list) # Reset registered plugins for module_name, module in sys.modules.items(): @@ -141,6 +157,11 @@ def classDecorator(self): return classDecorator +def afterLoad(func): + plugin_manager.after_load.append(func) + return func + + # - Example usage - if __name__ == "__main__": diff --git a/src/Site/Site.py b/src/Site/Site.py index 62d528b7b..29b00cab9 100644 --- a/src/Site/Site.py +++ b/src/Site/Site.py @@ -1,22 +1,17 @@ import os import json import logging -import hashlib import re import time import random import sys -import struct -import socket -import urllib -import urllib2 +import hashlib +import collections import gevent import gevent.pool import util -from lib import bencode -from lib.subtl.subtl import UdpTrackerClient from Config import config from Peer import Peer from Worker import WorkerManager @@ -27,6 +22,8 @@ from util import helper from util import Diff from Plugin import PluginManager +from File import FileServer +from SiteAnnouncer import SiteAnnouncer import SiteManager @@ -34,16 +31,16 @@ class Site(object): def __init__(self, address, allow_create=True, settings=None): - self.address = re.sub("[^A-Za-z0-9]", "", address) # Make sure its correct address + self.address = str(re.sub("[^A-Za-z0-9]", "", address)) # Make sure its correct address + self.address_hash = hashlib.sha256(self.address).digest() self.address_short = "%s..%s" % (self.address[:6], self.address[-4:]) # Short address for logging self.log = logging.getLogger("Site:%s" % self.address_short) self.addEventListeners() self.content = None # Load content.json self.peers = {} # Key: ip:port, Value: Peer.Peer + self.peers_recent = collections.deque(maxlen=100) self.peer_blacklist = 
SiteManager.peer_blacklist # Ignore this peers (eg. myself) - self.time_announce = 0 # Last announce time to tracker - self.last_tracker_id = random.randint(0, 10) # Last announced tracker id self.worker_manager = WorkerManager(self) # Handle site download from other peers self.bad_files = {} # SHA check failed files, need to redownload {"inner.content": 1} (key: file, value: failed accept) self.content_updated = None # Content.js update time @@ -52,14 +49,21 @@ def __init__(self, address, allow_create=True, settings=None): self.websockets = [] # Active site websocket connections self.connection_server = None - self.storage = SiteStorage(self, allow_create=allow_create) # Save and load site files self.loadSettings(settings) # Load settings from sites.json + self.storage = SiteStorage(self, allow_create=allow_create) # Save and load site files self.content_manager = ContentManager(self) self.content_manager.loadContents() # Load content.json files if "main" in sys.modules and "file_server" in dir(sys.modules["main"]): # Use global file server by default if possible self.connection_server = sys.modules["main"].file_server else: - self.connection_server = None + if "main" in sys.modules: + sys.modules["main"].file_server = FileServer() + self.connection_server = sys.modules["main"].file_server + else: + self.connection_server = FileServer() + + self.announcer = SiteAnnouncer(self) # Announce and get peer list from other nodes + if not self.settings.get("auth_key"): # To auth user in site (Obsolete, will be removed) self.settings["auth_key"] = CryptHash.random() self.log.debug("New auth key: %s" % self.settings["auth_key"]) @@ -68,6 +72,10 @@ def __init__(self, address, allow_create=True, settings=None): self.settings["wrapper_key"] = CryptHash.random() self.log.debug("New wrapper key: %s" % self.settings["wrapper_key"]) + if not self.settings.get("ajax_key"): # To auth websocket permissions + self.settings["ajax_key"] = CryptHash.random() + self.log.debug("New ajax 
key: %s" % self.settings["ajax_key"]) + def __str__(self): return "Site %s" % self.address_short @@ -82,17 +90,24 @@ def loadSettings(self, settings=None): self.settings = settings if "cache" not in settings: settings["cache"] = {} - if "size_files_optional" not in settings: + if "size_optional" not in settings: settings["size_optional"] = 0 if "optional_downloaded" not in settings: settings["optional_downloaded"] = 0 + if "downloaded" not in settings: + settings["downloaded"] = settings.get("added") self.bad_files = settings["cache"].get("bad_files", {}) settings["cache"]["bad_files"] = {} - # Reset tries + # Give it minimum 10 tries after restart for inner_path in self.bad_files: - self.bad_files[inner_path] = 1 + self.bad_files[inner_path] = min(self.bad_files[inner_path], 20) else: - self.settings = {"own": False, "serving": True, "permissions": [], "added": int(time.time()), "optional_downloaded": 0, "size_optional": 0} # Default + self.settings = { + "own": False, "serving": True, "permissions": [], "cache": {"bad_files": {}}, "size_files_optional": 0, + "added": int(time.time()), "downloaded": None, "optional_downloaded": 0, "size_optional": 0 + } # Default + if config.download_optional == "auto": + self.settings["autodownloadoptional"] = True # Add admin permissions to homepage if self.address == config.homepage and "ADMIN" not in self.settings["permissions"]: @@ -109,6 +124,12 @@ def saveSettings(self): SiteManager.site_manager.load(False) SiteManager.site_manager.save() + def getSettingsCache(self): + back = {} + back["bad_files"] = self.bad_files + back["hashfield"] = self.content_manager.hashfield.tostring().encode("base64") + return back + # Max site size in MB def getSizeLimit(self): return self.settings.get("size_limit", int(config.size_limit)) @@ -128,6 +149,9 @@ def downloadContent(self, inner_path, download_files=True, peer=None, check_modi if config.verbose: self.log.debug("Downloading %s..." 
% inner_path) + if not inner_path.endswith("content.json"): + return False + found = self.needFile(inner_path, update=self.bad_files.get(inner_path)) content_inner_dir = helper.getDirname(inner_path) if not found: @@ -157,14 +181,29 @@ def downloadContent(self, inner_path, download_files=True, peer=None, check_modi diff_actions = diffs.get(file_relative_path) if diff_actions and self.bad_files.get(file_inner_path): try: + s = time.time() new_file = Diff.patch(self.storage.open(file_inner_path, "rb"), diff_actions) new_file.seek(0) + time_diff = time.time() - s + + s = time.time() diff_success = self.content_manager.verifyFile(file_inner_path, new_file) + time_verify = time.time() - s + if diff_success: - self.log.debug("Patched successfully: %s" % file_inner_path) + s = time.time() new_file.seek(0) self.storage.write(file_inner_path, new_file) + time_write = time.time() - s + + s = time.time() self.onFileDone(file_inner_path) + time_on_done = time.time() - s + + self.log.debug( + "Patched successfully: %s (diff: %.3fs, verify: %.3fs, write: %.3fs, on_done: %.3fs)" % + (file_inner_path, time_diff, time_verify, time_write, time_on_done) + ) except Exception, err: self.log.debug("Failed to patch %s: %s" % (file_inner_path, err)) diff_success = False @@ -212,7 +251,7 @@ def downloadContent(self, inner_path, download_files=True, peer=None, check_modi self.log.debug("%s: Downloading %s files, changed: %s..." 
% (inner_path, len(file_threads), len(changed))) gevent.joinall(file_threads) if config.verbose: - self.log.debug("%s: DownloadContent ended in %.2fs" % (inner_path, time.time() - s)) + self.log.debug("%s: DownloadContent ended in %.3fs" % (inner_path, time.time() - s)) if not self.worker_manager.tasks: self.onComplete() # No more task trigger site complete @@ -227,9 +266,12 @@ def getReachableBadFiles(self): # Retry download bad files def retryBadFiles(self, force=False): + self.checkBadFiles() + self.log.debug("Retry %s bad files" % len(self.bad_files)) content_inner_paths = [] file_inner_paths = [] + for bad_file, tries in self.bad_files.items(): if force or random.randint(0, min(40, tries)) < 4: # Larger number tries = less likely to check every 15min if bad_file.endswith("content.json"): @@ -243,14 +285,30 @@ def retryBadFiles(self, force=False): if file_inner_paths: self.pooledDownloadFile(file_inner_paths, only_if_bad=True) + def checkBadFiles(self): + for bad_file in self.bad_files.keys(): + file_info = self.content_manager.getFileInfo(bad_file) + if bad_file.endswith("content.json"): + if file_info is False and bad_file != "content.json": + del self.bad_files[bad_file] + self.log.debug("No info for file: %s, removing from bad_files" % bad_file) + else: + if file_info is False or not file_info.get("size"): + del self.bad_files[bad_file] + self.log.debug("No info or size for file: %s, removing from bad_files" % bad_file) + # Download all files of the site @util.Noparallel(blocking=False) def download(self, check_size=False, blind_includes=False): + if not self.connection_server: + self.log.debug("No connection server found, skipping download") + return False + self.log.debug( "Start downloading, bad_files: %s, check_size: %s, blind_includes: %s" % (self.bad_files, check_size, blind_includes) ) - gevent.spawn(self.announce) + gevent.spawn(self.announce, force=True) if check_size: # Check the size first valid = self.downloadContent("content.json", 
download_files=False) # Just download content.json files if not valid: @@ -264,24 +322,38 @@ def download(self, check_size=False, blind_includes=False): return valid def pooledDownloadContent(self, inner_paths, pool_size=100, only_if_bad=False): - self.log.debug("New downloadContent pool: len: %s" % len(inner_paths)) + self.log.debug("New downloadContent pool: len: %s, only if bad: %s" % (len(inner_paths), only_if_bad)) self.worker_manager.started_task_num += len(inner_paths) pool = gevent.pool.Pool(pool_size) + num_skipped = 0 + site_size_limit = self.getSizeLimit() * 1024 * 1024 for inner_path in inner_paths: if not only_if_bad or inner_path in self.bad_files: pool.spawn(self.downloadContent, inner_path) + else: + num_skipped += 1 self.worker_manager.started_task_num -= 1 - self.log.debug("Ended downloadContent pool len: %s" % len(inner_paths)) + if self.settings["size"] > site_size_limit * 0.95: + self.log.warning("Site size limit almost reached, aborting downloadContent pool") + for aborted_inner_path in inner_paths: + if aborted_inner_path in self.bad_files: + del self.bad_files[aborted_inner_path] + self.worker_manager.removeSolvedFileTasks(mark_as_good=False) + break + self.log.debug("Ended downloadContent pool len: %s, skipped: %s" % (len(inner_paths), num_skipped)) def pooledDownloadFile(self, inner_paths, pool_size=100, only_if_bad=False): - self.log.debug("New downloadFile pool: len: %s" % len(inner_paths)) + self.log.debug("New downloadFile pool: len: %s, only if bad: %s" % (len(inner_paths), only_if_bad)) self.worker_manager.started_task_num += len(inner_paths) pool = gevent.pool.Pool(pool_size) + num_skipped = 0 for inner_path in inner_paths: if not only_if_bad or inner_path in self.bad_files: pool.spawn(self.needFile, inner_path, update=True) + else: + num_skipped += 1 self.worker_manager.started_task_num -= 1 - self.log.debug("Ended downloadFile pool len: %s" % len(inner_paths)) + self.log.debug("Ended downloadFile pool len: %s, skipped: %s" % 
(len(inner_paths), num_skipped)) # Update worker, try to find client that supports listModifications command def updater(self, peers_try, queried, since): @@ -289,11 +361,13 @@ def updater(self, peers_try, queried, since): if not peers_try or len(queried) >= 3: # Stop after 3 successful query break peer = peers_try.pop(0) - if not peer.connection and len(queried) < 2: - peer.connect() # Only open new connection if less than 2 queried already - if not peer.connection or peer.connection.handshake.get("rev", 0) < 126: - continue # Not compatible - res = peer.listModified(since) + if config.verbose: + self.log.debug("Try to get updates from: %s Left: %s" % (peer, peers_try)) + + res = None + with gevent.Timeout(20, exception=False): + res = peer.listModified(since) + if not res or "modified_files" not in res: continue # Failed query @@ -301,21 +375,28 @@ def updater(self, peers_try, queried, since): modified_contents = [] my_modified = self.content_manager.listModified(since) for inner_path, modified in res["modified_files"].iteritems(): # Check if the peer has newer files than we - newer = int(modified) > my_modified.get(inner_path, 0) - if newer and inner_path not in self.bad_files and not self.content_manager.isArchived(inner_path, modified): - # We dont have this file or we have older - modified_contents.append(inner_path) - self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 + has_newer = int(modified) > my_modified.get(inner_path, 0) + has_older = int(modified) < my_modified.get(inner_path, 0) + if inner_path not in self.bad_files and not self.content_manager.isArchived(inner_path, modified): + if has_newer: + # We dont have this file or we have older + modified_contents.append(inner_path) + self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 + if has_older: + self.log.debug("%s client has older version of %s, publishing there..." 
% (peer, inner_path)) + gevent.spawn(self.publisher, inner_path, [peer], [], 1) if modified_contents: self.log.debug("%s new modified file from %s" % (len(modified_contents), peer)) modified_contents.sort(key=lambda inner_path: 0 - res["modified_files"][inner_path]) # Download newest first - gevent.spawn(self.pooledDownloadContent, modified_contents) + gevent.spawn(self.pooledDownloadContent, modified_contents, only_if_bad=True) # Check modified content.json files from peers and add modified files to bad_files # Return: Successfully queried peers [Peer, Peer...] def checkModifications(self, since=None): + s = time.time() peers_try = [] # Try these peers queried = [] # Successfully queried from these peers + limit = 5 # Wait for peers if not self.peers: @@ -326,46 +407,56 @@ def checkModifications(self, since=None): if self.peers: break - for peer in self.peers.itervalues(): # Try to find connected good peers, but we must have at least 5 peers - if peer.findConnection() and peer.connection.handshake.get("rev", 0) > 125: # Add to the beginning if rev125 - peers_try.insert(0, peer) - elif len(peers_try) < 5: # Backup peers, add to end of the try list - peers_try.append(peer) + peers_try = self.getConnectedPeers() + peers_connected_num = len(peers_try) + if peers_connected_num < limit * 2: # Add more, non-connected peers if necessary + peers_try += self.getRecentPeers(limit * 5) if since is None: # No since defined, download from last modification time-1day since = self.settings.get("modified", 60 * 60 * 24) - 60 * 60 * 24 - self.log.debug("Try to get listModifications from peers: %s since: %s" % (peers_try, since)) + + if config.verbose: + self.log.debug( + "Try to get listModifications from peers: %s, connected: %s, since: %s" % + (peers_try, peers_connected_num, since) + ) updaters = [] for i in range(3): updaters.append(gevent.spawn(self.updater, peers_try, queried, since)) gevent.joinall(updaters, timeout=10) # Wait 10 sec to workers done query modifications - if 
not queried: - gevent.joinall(updaters, timeout=10) # Wait another 10 sec if none of updaters finished + if not queried: # Start another 3 thread if first 3 is stuck + peers_try[0:0] = [peer for peer in self.getConnectedPeers() if peer.connection.connected] # Add connected peers + for _ in range(10): + gevent.joinall(updaters, timeout=10) # Wait another 10 sec if none of updaters finished + if queried: + break + + self.log.debug("Queried listModifications from: %s in %.3fs since %s" % (queried, time.time() - s, since)) time.sleep(0.1) - self.log.debug("Queried listModifications from: %s" % queried) return queried # Update content.json from peers and download changed files # Return: None @util.Noparallel() - def update(self, announce=False, check_files=False): + def update(self, announce=False, check_files=False, since=None): self.content_manager.loadContent("content.json", load_includes=False) # Reload content.json self.content_updated = None # Reset content updated time self.updateWebsocket(updating=True) - for bad_file in self.bad_files.keys(): - file_info = self.content_manager.getFileInfo(bad_file) - if file_info is False or (not bad_file.endswith("content.json") and not file_info.get("size")): - del self.bad_files[bad_file] - self.log.debug("No info for file: %s, removing from bad_files" % bad_file) + # Remove files that no longer in content.json + self.checkBadFiles() if announce: - self.announce() + self.announce(force=True) - queried = self.checkModifications() + # Full update, we can reset bad files + if check_files and since == 0: + self.bad_files = {} + + queried = self.checkModifications(since) if check_files: self.storage.updateBadFiles(quick_check=True) # Quick check and mark bad files based on file size @@ -380,6 +471,8 @@ def update(self, announce=False, check_files=False): # Failed to query modifications self.content_updated = False self.bad_files["content.json"] = 1 + else: + self.content_updated = time.time() self.updateWebsocket(updated=True) @@ 
-394,33 +487,17 @@ def redownloadContents(self): gevent.joinall(content_threads) # Publish worker - def publisher(self, inner_path, peers, published, limit, event_done=None, diffs={}): + def publisher(self, inner_path, peers, published, limit, diffs={}, event_done=None, cb_progress=None): file_size = self.storage.getSize(inner_path) content_json_modified = self.content_manager.contents[inner_path]["modified"] body = self.storage.read(inner_path) - # Find out my ip and port - tor_manager = self.connection_server.tor_manager - if tor_manager and tor_manager.enabled and tor_manager.start_onions: - my_ip = tor_manager.getOnion(self.address) - if my_ip: - my_ip += ".onion" - my_port = config.fileserver_port - else: - my_ip = config.ip_external - if self.connection_server.port_opened: - my_port = config.fileserver_port - else: - my_port = 0 - while 1: if not peers or len(published) >= limit: if event_done: event_done.set(True) break # All peers done, or published engouht - peer = peers.pop(0) - if peer in peers: # Remove duplicate - peers.remove(peer) + peer = peers.pop() if peer in published: continue if peer.last_content_json_update == content_json_modified: @@ -442,8 +519,7 @@ def publisher(self, inner_path, peers, published, limit, event_done=None, diffs= "site": self.address, "inner_path": inner_path, "body": body, - "diffs": diffs, - "peer": (my_ip, my_port) + "diffs": diffs }) if result: break @@ -453,16 +529,18 @@ def publisher(self, inner_path, peers, published, limit, event_done=None, diffs= if result and "ok" in result: published.append(peer) + if cb_progress and len(published) <= limit: + cb_progress(len(published), limit) self.log.info("[OK] %s: %s %s/%s" % (peer.key, result["ok"], len(published), limit)) else: if result == {"exception": "Timeout"}: - peer.onConnectionError() + peer.onConnectionError("Publish timeout") self.log.info("[FAILED] %s: %s" % (peer.key, result)) time.sleep(0.01) # Update content.json on peers @util.Noparallel() - def publish(self, 
limit="default", inner_path="content.json", diffs={}): + def publish(self, limit="default", inner_path="content.json", diffs={}, cb_progress=None): published = [] # Successfully published (Peer) publishers = [] # Publisher threads @@ -470,7 +548,7 @@ def publish(self, limit="default", inner_path="content.json", diffs={}): self.announce() if limit == "default": - limit = 3 + limit = 5 threads = limit peers = self.getConnectedPeers() @@ -479,11 +557,10 @@ def publish(self, limit="default", inner_path="content.json", diffs={}): random.shuffle(peers) peers = sorted(peers, key=lambda peer: peer.connection.handshake.get("rev", 0) < config.rev - 100) # Prefer newer clients - # Add more, non-connected peers is necessary - if len(peers) < limit * 2: - peers_more = self.peers.values() - random.shuffle(peers_more) - peers += peers_more[0:limit * 2] + if len(peers) < limit * 2 and len(self.peers) > len(peers): # Add more, non-connected peers if necessary + peers += self.getRecentPeers(limit * 2) + + peers = set(peers) self.log.info("Publishing %s to %s/%s peers (connected: %s) diffs: %s (%.2fk)..." 
% ( inner_path, limit, len(self.peers), num_connected_peers, diffs.keys(), float(len(str(diffs))) / 1024 @@ -494,7 +571,7 @@ def publish(self, limit="default", inner_path="content.json", diffs={}): event_done = gevent.event.AsyncResult() for i in range(min(len(peers), limit, threads)): - publisher = gevent.spawn(self.publisher, inner_path, peers, published, limit, event_done, diffs) + publisher = gevent.spawn(self.publisher, inner_path, peers, published, limit, diffs, event_done, cb_progress) publishers.append(publisher) event_done.get() # Wait for done @@ -505,7 +582,7 @@ def publish(self, limit="default", inner_path="content.json", diffs={}): # Publish more peers in the backgroup self.log.info( - "Successfuly %s published to %s peers, publishing to %s more peers in the background" % + "Published %s to %s peers, publishing to %s more peers in the background" % (inner_path, len(published), limit) ) @@ -518,7 +595,7 @@ def publish(self, limit="default", inner_path="content.json", diffs={}): return len(published) # Copy this site - def clone(self, address, privatekey=None, address_index=None, overwrite=False): + def clone(self, address, privatekey=None, address_index=None, root_inner_path="", overwrite=False): import shutil new_site = SiteManager.site_manager.need(address, all_file=False) default_dirs = [] # Dont copy these directories (has -default version) @@ -526,16 +603,25 @@ def clone(self, address, privatekey=None, address_index=None, overwrite=False): if "-default" in dir_name: default_dirs.append(dir_name.replace("-default", "")) - self.log.debug("Cloning to %s, ignore dirs: %s" % (address, default_dirs)) + self.log.debug("Cloning to %s, ignore dirs: %s, root: %s" % (address, default_dirs, root_inner_path)) # Copy root content.json if not new_site.storage.isFile("content.json") and not overwrite: - # Content.json not exist yet, create a new one from source site - content_json = self.storage.loadJson("content.json") + # New site: Content.json not exist yet, 
create a new one from source site + if "size_limit" in self.settings: + new_site.settings["size_limit"] = self.settings["size_limit"] + + # Use content.json-default is specified + if self.storage.isFile(root_inner_path + "/content.json-default"): + content_json = self.storage.loadJson(root_inner_path + "/content.json-default") + else: + content_json = self.storage.loadJson("content.json") + if "domain" in content_json: del content_json["domain"] content_json["title"] = "my" + content_json["title"] content_json["cloned_from"] = self.address + content_json["clone_root"] = root_inner_path content_json["files"] = {} if address_index: content_json["address_index"] = address_index # Site owner's BIP32 index @@ -546,20 +632,38 @@ def clone(self, address, privatekey=None, address_index=None, overwrite=False): # Copy files for content_inner_path, content in self.content_manager.contents.items(): - for file_relative_path in sorted(content["files"].keys()): + file_relative_paths = content.get("files", {}).keys() + + # Sign content.json at the end to make sure every file is included + file_relative_paths.sort() + file_relative_paths.sort(key=lambda key: key.replace("-default", "").endswith("content.json")) + + for file_relative_path in file_relative_paths: file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to content.json file_inner_path = file_inner_path.strip("/") # Strip leading / + if not file_inner_path.startswith(root_inner_path): + self.log.debug("[SKIP] %s (not in clone root)" % file_inner_path) + continue if file_inner_path.split("/")[0] in default_dirs: # Dont copy directories that has -default postfixed alternative self.log.debug("[SKIP] %s (has default alternative)" % file_inner_path) continue file_path = self.storage.getPath(file_inner_path) # Copy the file normally to keep the -default postfixed dir and file to allow cloning later - file_path_dest = new_site.storage.getPath(file_inner_path) + if root_inner_path: + 
file_inner_path_dest = re.sub("^%s/" % re.escape(root_inner_path), "", file_inner_path) + file_path_dest = new_site.storage.getPath(file_inner_path_dest) + else: + file_inner_path_dest = file_inner_path + file_path_dest = new_site.storage.getPath(file_inner_path) + self.log.debug("[COPY] %s to %s..." % (file_inner_path, file_path_dest)) dest_dir = os.path.dirname(file_path_dest) if not os.path.isdir(dest_dir): os.makedirs(dest_dir) + if file_inner_path_dest.replace("-default", "") == "content.json": # Don't copy root content.json-default + continue + shutil.copy(file_path, file_path_dest) # If -default in path, create a -default less copy of the file @@ -582,13 +686,13 @@ def clone(self, address, privatekey=None, address_index=None, overwrite=False): delete_removed_files=False, load_includes=False ) if privatekey: - new_site.content_manager.sign(file_inner_path.replace("-default", ""), privatekey) + new_site.content_manager.sign(file_inner_path.replace("-default", ""), privatekey, remove_missing_optional=True) new_site.content_manager.loadContent( file_inner_path, add_bad_files=False, delete_removed_files=False, load_includes=False ) if privatekey: - new_site.content_manager.sign("content.json", privatekey) + new_site.content_manager.sign("content.json", privatekey, remove_missing_optional=True) new_site.content_manager.loadContent( "content.json", add_bad_files=False, delete_removed_files=False, load_includes=False ) @@ -604,6 +708,27 @@ def clone(self, address, privatekey=None, address_index=None, overwrite=False): def pooledNeedFile(self, *args, **kwargs): return self.needFile(*args, **kwargs) + def isFileDownloadAllowed(self, inner_path, file_info): + if file_info.get("size", 0) > config.file_size_limit * 1024 * 1024: + self.log.debug( + "File size %s too large: %sMB > %sMB, skipping..." 
% + (inner_path, file_info.get("size", 0) / 1024 / 1024, config.file_size_limit) + ) + return False + else: + return True + + def needFileInfo(self, inner_path): + file_info = self.content_manager.getFileInfo(inner_path) + if not file_info: + # No info for file, download all content.json first + self.log.debug("No info for %s, waiting for all content.json" % inner_path) + success = self.downloadContent("content.json", download_files=False) + if not success: + return False + file_info = self.content_manager.getFileInfo(inner_path) + return file_info + # Check and download if file not exist def needFile(self, inner_path, update=False, blocking=True, peer=None, priority=0): if self.storage.isFile(inner_path) and not update: # File exist, no need to do anything @@ -617,22 +742,16 @@ def needFile(self, inner_path, update=False, blocking=True, peer=None, priority= gevent.spawn(self.announce) if inner_path != "content.json": # Prevent double download task = self.worker_manager.addTask("content.json", peer) - task.get() + task["evt"].get() self.content_manager.loadContent() if not self.content_manager.contents.get("content.json"): return False # Content.json download failed + file_info = None if not inner_path.endswith("content.json"): - file_info = self.content_manager.getFileInfo(inner_path) + file_info = self.needFileInfo(inner_path) if not file_info: - # No info for file, download all content.json first - self.log.debug("No info for %s, waiting for all content.json" % inner_path) - success = self.downloadContent("content.json", download_files=False) - if not success: - return False - file_info = self.content_manager.getFileInfo(inner_path) - if not file_info: - return False # Still no info for file + return False if "cert_signers" in file_info and not file_info["content_inner_path"] in self.content_manager.contents: self.log.debug("Missing content.json for requested user file: %s" % inner_path) if self.bad_files.get(file_info["content_inner_path"], 0) > 5: @@ -642,22 
+761,28 @@ def needFile(self, inner_path, update=False, blocking=True, peer=None, priority= return False self.downloadContent(file_info["content_inner_path"]) - task = self.worker_manager.addTask(inner_path, peer, priority=priority) + if not self.isFileDownloadAllowed(inner_path, file_info): + self.log.debug("%s: Download not allowed" % inner_path) + return False + + task = self.worker_manager.addTask(inner_path, peer, priority=priority, file_info=file_info) if blocking: - return task.get() + return task["evt"].get() else: - return task + return task["evt"] # Add or update a peer to site # return_peer: Always return the peer even if it was already present - def addPeer(self, ip, port, return_peer=False, connection=None): - if not ip: + def addPeer(self, ip, port, return_peer=False, connection=None, source="other"): + if not ip or ip == "0.0.0.0": return False + key = "%s:%s" % (ip, port) - if key in self.peers: # Already has this ip - self.peers[key].found() + peer = self.peers.get(key) + if peer: # Already has this ip + peer.found(source) if return_peer: # Always return peer - return self.peers[key] + return peer else: return False else: # New peer @@ -665,194 +790,26 @@ def addPeer(self, ip, port, return_peer=False, connection=None): return False # Ignore blacklist (eg. 
myself) peer = Peer(ip, port, self) self.peers[key] = peer + peer.found(source) return peer - # Gather peer from connected peers - @util.Noparallel(blocking=False) - def announcePex(self, query_num=2, need_num=5): - peers = [peer for peer in self.peers.values() if peer.connection and peer.connection.connected] # Connected peers - if len(peers) == 0: # Small number of connected peers for this site, connect to any - self.log.debug("Small number of peers detected...query all of peers using pex") - peers = self.peers.values() - need_num = 10 - - random.shuffle(peers) - done = 0 - added = 0 - for peer in peers: - res = peer.pex(need_num=need_num) - if type(res) == int: # We have result - done += 1 - added += res - if res: - self.worker_manager.onPeers() - self.updateWebsocket(peers_added=res) - if done == query_num: - break - self.log.debug("Queried pex from %s peers got %s new peers." % (done, added)) - - # Gather peers from tracker - # Return: Complete time or False on error - def announceTracker(self, tracker_protocol, tracker_address, fileserver_port=0, add_types=[], my_peer_id="", mode="start"): - s = time.time() - if "ip4" not in add_types: - fileserver_port = 0 - - if tracker_protocol == "udp": # Udp tracker - if config.disable_udp: - return False # No udp supported - ip, port = tracker_address.split(":") - tracker = UdpTrackerClient(ip, int(port)) - tracker.peer_port = fileserver_port - try: - tracker.connect() - tracker.poll_once() - tracker.announce(info_hash=hashlib.sha1(self.address).hexdigest(), num_want=50) - back = tracker.poll_once() - peers = back["response"]["peers"] - except Exception, err: - return False - - elif tracker_protocol == "http": # Http tracker - params = { - 'info_hash': hashlib.sha1(self.address).digest(), - 'peer_id': my_peer_id, 'port': fileserver_port, - 'uploaded': 0, 'downloaded': 0, 'left': 0, 'compact': 1, 'numwant': 30, - 'event': 'started' - } - req = None - try: - url = "http://" + tracker_address + "?" 
+ urllib.urlencode(params) - # Load url - with gevent.Timeout(30, False): # Make sure of timeout - req = urllib2.urlopen(url, timeout=25) - response = req.read() - req.fp._sock.recv = None # Hacky avoidance of memory leak for older python versions - req.close() - req = None - if not response: - self.log.debug("Http tracker %s response error" % url) - return False - # Decode peers - peer_data = bencode.decode(response)["peers"] - response = None - peer_count = len(peer_data) / 6 - peers = [] - for peer_offset in xrange(peer_count): - off = 6 * peer_offset - peer = peer_data[off:off + 6] - addr, port = struct.unpack('!LH', peer) - peers.append({"addr": socket.inet_ntoa(struct.pack('!L', addr)), "port": port}) - except Exception, err: - self.log.debug("Http tracker %s error: %s" % (url, err)) - if req: - req.close() - req = None - return False - else: - peers = [] - - # Adding peers - added = 0 - for peer in peers: - if not peer["port"]: - continue # Dont add peers with port 0 - if self.addPeer(peer["addr"], peer["port"]): - added += 1 - if added: - self.worker_manager.onPeers() - self.updateWebsocket(peers_added=added) - self.log.debug("Found %s peers, new: %s, total: %s" % (len(peers), added, len(self.peers))) - return time.time() - s - - # Add myself and get other peers from tracker - def announce(self, force=False, mode="start", pex=True): - if time.time() < self.time_announce + 30 and not force: - return # No reannouncing within 30 secs - self.time_announce = time.time() - - trackers = config.trackers - # Filter trackers based on supported networks - if config.disable_udp: - trackers = [tracker for tracker in trackers if not tracker.startswith("udp://")] - if self.connection_server and self.connection_server.tor_manager and not self.connection_server.tor_manager.enabled: - trackers = [tracker for tracker in trackers if ".onion" not in tracker] - - if trackers and (mode == "update" or mode == "more"): # Only announce on one tracker, increment the queried tracker 
id - self.last_tracker_id += 1 - self.last_tracker_id = self.last_tracker_id % len(trackers) - trackers = [trackers[self.last_tracker_id]] # We only going to use this one - - errors = [] - slow = [] - add_types = [] - if self.connection_server: - my_peer_id = self.connection_server.peer_id - - # Type of addresses they can reach me - if self.connection_server.port_opened: - add_types.append("ip4") - if self.connection_server.tor_manager and self.connection_server.tor_manager.start_onions: - add_types.append("onion") - else: - my_peer_id = "" - - s = time.time() - announced = 0 - threads = [] - fileserver_port = config.fileserver_port - - for tracker in trackers: # Start announce threads - tracker_protocol, tracker_address = tracker.split("://") - thread = gevent.spawn( - self.announceTracker, tracker_protocol, tracker_address, fileserver_port, add_types, my_peer_id, mode - ) - threads.append(thread) - thread.tracker_address = tracker_address - thread.tracker_protocol = tracker_protocol - - gevent.joinall(threads, timeout=10) # Wait for announce finish - - for thread in threads: - if thread.value: - if thread.value > 1: - slow.append("%.2fs %s://%s" % (thread.value, thread.tracker_protocol, thread.tracker_address)) - announced += 1 - else: - if thread.ready(): - errors.append("%s://%s" % (thread.tracker_protocol, thread.tracker_address)) - else: # Still running - slow.append("10s+ %s://%s" % (thread.tracker_protocol, thread.tracker_address)) - - # Save peers num - self.settings["peers"] = len(self.peers) - - if len(errors) < len(threads): # Less errors than total tracker nums - self.log.debug( - "Announced types %s in mode %s to %s trackers in %.3fs, errors: %s, slow: %s" % - (add_types, mode, announced, time.time() - s, errors, slow) - ) - else: - if mode != "update": - self.log.error("Announce to %s trackers in %.3fs, failed" % (announced, time.time() - s)) - - if pex: - if not [peer for peer in self.peers.values() if peer.connection and peer.connection.connected]: 
- # If no connected peer yet then wait for connections - gevent.spawn_later(3, self.announcePex, need_num=10) # Spawn 3 secs later - else: # Else announce immediately - if mode == "more": # Need more peers - self.announcePex(need_num=10) - else: - self.announcePex() + def announce(self, *args, **kwargs): + self.announcer.announce(*args, **kwargs) # Keep connections to get the updates - def needConnections(self, num=5): + def needConnections(self, num=None, check_site_on_reconnect=False): + if num is None: + if len(self.peers) < 50: + num = 3 + else: + num = 6 need = min(len(self.peers), num, config.connected_limit) # Need 5 peer, but max total peers - connected = self.getConnectedPeers() + connected = len(self.getConnectedPeers()) - self.log.debug("Need connections: %s, Current: %s, Total: %s" % (need, len(connected), len(self.peers))) + connected_before = connected + + self.log.debug("Need connections: %s, Current: %s, Total: %s" % (need, connected, len(self.peers))) if connected < need: # Need more than we have for peer in self.peers.values(): @@ -862,12 +819,19 @@ def needConnections(self, num=5): connected += 1 # Successfully connected if connected >= need: break + self.log.debug( + "Connected before: %s, after: %s. Check site: %s." 
% + (connected_before, connected, check_site_on_reconnect) + ) + + if check_site_on_reconnect and connected_before == 0 and connected > 0 and self.connection_server.has_internet: + gevent.spawn(self.update, check_files=False) + return connected - # Return: Probably working, connectable Peers - def getConnectablePeers(self, need_num=5, ignore=[]): + # Return: Probably peers verified to be connectable recently + def getConnectablePeers(self, need_num=5, ignore=[], allow_private=True): peers = self.peers.values() - random.shuffle(peers) found = [] for peer in peers: if peer.key.endswith(":0"): @@ -879,20 +843,69 @@ def getConnectablePeers(self, need_num=5, ignore=[]): if time.time() - peer.connection.last_recv_time > 60 * 60 * 2: # Last message more than 2 hours ago peer.connection = None # Cleanup: Dead connection continue + if not allow_private and helper.isPrivateIp(peer.ip): + continue found.append(peer) if len(found) >= need_num: break # Found requested number of peers - if need_num > 5 and need_num < 100 and len(found) < need_num: # Return not that good peers - found = [peer for peer in peers if not peer.key.endswith(":0") and peer.key not in ignore][0:need_num - len(found)] + if len(found) < need_num: # Return not that good peers + found += [ + peer for peer in peers + if not peer.key.endswith(":0") and + peer.key not in ignore and + (allow_private or not helper.isPrivateIp(peer.ip)) + ][0:need_num - len(found)] return found + # Return: Recently found peers + def getRecentPeers(self, need_num): + found = list(set(self.peers_recent)) + self.log.debug("Recent peers %s of %s (need: %s)" % (len(found), len(self.peers_recent), need_num)) + + if len(found) >= need_num or len(found) >= len(self.peers): + return sorted( + found, + key=lambda peer: peer.reputation, + reverse=True + )[0:need_num] + + # Add random peers + need_more = need_num - len(found) + found_more = sorted( + self.peers.values()[0:need_more * 50], + key=lambda peer: peer.reputation, + reverse=True + 
)[0:need_more * 2] + + found += found_more + + return found[0:need_num] + def getConnectedPeers(self): - return [peer for peer in self.peers.values() if peer.connection and peer.connection.connected] + back = [] + if not self.connection_server: + return [] + + tor_manager = self.connection_server.tor_manager + for connection in self.connection_server.connections: + if not connection.connected and time.time() - connection.start_time > 20: # Still not connected after 20s + continue + peer = self.peers.get("%s:%s" % (connection.ip, connection.port)) + if peer: + if connection.ip.endswith(".onion") and connection.target_onion and tor_manager.start_onions: + # Check if the connection is made with the onion address created for the site + valid_target_onions = (tor_manager.getOnion(self.address), tor_manager.getOnion("global")) + if connection.target_onion not in valid_target_onions: + continue + if not peer.connection: + peer.connect(connection) + back.append(peer) + return back # Cleanup probably dead peers and close connection if too much - def cleanupPeers(self): + def cleanupPeers(self, peers_protected=[]): peers = self.peers.values() if len(peers) > 20: # Cleanup old peers @@ -908,7 +921,7 @@ def cleanupPeers(self): if peer.connection and not peer.connection.connected: peer.connection = None # Dead connection if time.time() - peer.time_found > ttl: # Not found on tracker or via pex in last 4 hour - peer.remove() + peer.remove("Time found expired") removed += 1 if removed > len(peers) * 0.1: # Don't remove too much at once break @@ -918,13 +931,20 @@ def cleanupPeers(self): # Close peers over the limit closed = 0 - connected_peers = self.getConnectedPeers() + connected_peers = [peer for peer in self.getConnectedPeers() if peer.connection.connected] # Only fully connected peers need_to_close = len(connected_peers) - config.connected_limit if closed < need_to_close: - sorted(connected_peers, key=lambda peer: peer.connection.sites) # Try to keep connections with more 
sites - for peer in connected_peers: - peer.remove() + # Try to keep connections with more sites + for peer in sorted(connected_peers, key=lambda peer: min(peer.connection.sites, 5)): + if not peer.connection: + continue + if peer.key in peers_protected: + continue + if peer.connection.sites > 5: + break + peer.connection.close("Cleanup peers") + peer.connection = None closed += 1 if closed >= need_to_close: break @@ -945,15 +965,17 @@ def sendMyHashfield(self, limit=5): if sent >= limit: break if sent: - self.log.debug("Sent my hashfield to %s peers" % sent) + my_hashfield_changed = self.content_manager.hashfield.time_changed + self.log.debug("Sent my hashfield (chaged %.3fs ago) to %s peers" % (time.time() - my_hashfield_changed, sent)) return sent # Update hashfield def updateHashfield(self, limit=5): # Return if no optional files - if not self.content_manager.hashfield and not self.content_manager.contents.get("content.json", {}).get("files_optional"): + if not self.content_manager.hashfield and not self.content_manager.has_optional_files: return False + s = time.time() queried = 0 connected_peers = self.getConnectedPeers() for peer in connected_peers: @@ -964,7 +986,7 @@ def updateHashfield(self, limit=5): if queried >= limit: break if queried: - self.log.debug("Queried hashfield from %s peers" % queried) + self.log.debug("Queried hashfield from %s peers in %.3fs" % (queried, time.time() - s)) return queried # Returns if the optional file is need to be downloaded or not @@ -1003,6 +1025,13 @@ def updateWebsocket(self, **kwargs): for ws in self.websockets: ws.event("siteChanged", self, param) + def messageWebsocket(self, message, type="info", progress=None): + for ws in self.websockets: + if progress is None: + ws.cmd("notification", [type, message]) + else: + ws.cmd("progress", [type, message, progress]) + # File download started @util.Noparallel(blocking=False) def fileStarted(self): @@ -1019,6 +1048,8 @@ def fileDone(self, inner_path): # Update content.json 
last downlad time if inner_path == "content.json": + if not self.settings.get("downloaded"): + self.settings["downloaded"] = int(time.time()) self.content_updated = time.time() self.updateWebsocket(file_done=inner_path) @@ -1028,7 +1059,14 @@ def fileFailed(self, inner_path): if inner_path == "content.json": self.content_updated = False self.log.debug("Can't update content.json") - if inner_path in self.bad_files: + if inner_path in self.bad_files and self.connection_server.has_internet: self.bad_files[inner_path] = self.bad_files.get(inner_path, 0) + 1 self.updateWebsocket(file_failed=inner_path) + + if self.bad_files.get(inner_path, 0) > 30: + self.fileForgot(inner_path) + + def fileForgot(self, inner_path): + self.log.debug("Giving up on %s" % inner_path) + del self.bad_files[inner_path] # Give up after 30 tries diff --git a/src/Site/SiteAnnouncer.py b/src/Site/SiteAnnouncer.py new file mode 100644 index 000000000..48a05e69c --- /dev/null +++ b/src/Site/SiteAnnouncer.py @@ -0,0 +1,407 @@ +import random +import time +import hashlib +import urllib +import urllib2 +import struct +import socket +import re +import collections + +from lib import bencode +from lib.subtl.subtl import UdpTrackerClient +from lib.PySocks import socks +from lib.PySocks import sockshandler +import gevent + +from Plugin import PluginManager +from Config import config +from Debug import Debug +from util import helper +import util + + +class AnnounceError(Exception): + pass + +global_stats = collections.defaultdict(lambda: collections.defaultdict(int)) + + +@PluginManager.acceptPlugins +class SiteAnnouncer(object): + def __init__(self, site): + self.site = site + self.stats = {} + self.fileserver_port = config.fileserver_port + self.peer_id = self.site.connection_server.peer_id + self.last_tracker_id = random.randint(0, 10) + self.time_last_announce = 0 + + def getTrackers(self): + return config.trackers + + def getSupportedTrackers(self): + trackers = self.getTrackers() + if config.disable_udp 
or config.trackers_proxy != "disable": + trackers = [tracker for tracker in trackers if not tracker.startswith("udp://")] + + if not self.site.connection_server.tor_manager.enabled: + trackers = [tracker for tracker in trackers if ".onion" not in tracker] + + if "ipv6" not in self.site.connection_server.supported_ip_types: + trackers = [tracker for tracker in trackers if helper.getIpType(self.getAddressParts(tracker)["ip"]) != "ipv6"] + + return trackers + + def getAnnouncingTrackers(self, mode): + trackers = self.getSupportedTrackers() + + if trackers and (mode == "update" or mode == "more"): # Only announce on one tracker, increment the queried tracker id + self.last_tracker_id += 1 + self.last_tracker_id = self.last_tracker_id % len(trackers) + trackers_announcing = [trackers[self.last_tracker_id]] # We only going to use this one + else: + trackers_announcing = trackers + + return trackers_announcing + + def getOpenedServiceTypes(self): + back = [] + # Type of addresses they can reach me + if config.trackers_proxy == "disable": + for ip_type, opened in self.site.connection_server.port_opened.items(): + if opened: + back.append(ip_type) + if self.site.connection_server.tor_manager.start_onions: + back.append("onion") + return back + + @util.Noparallel(blocking=False) + def announce(self, force=False, mode="start", pex=True): + if time.time() - self.time_last_announce < 30 and not force: + return # No reannouncing within 30 secs + if force: + self.site.log.debug("Force reannounce in mode %s" % mode) + + self.fileserver_port = config.fileserver_port + self.time_last_announce = time.time() + + trackers = self.getAnnouncingTrackers(mode) + + if config.verbose: + self.site.log.debug("Tracker announcing, trackers: %s" % trackers) + + errors = [] + slow = [] + s = time.time() + threads = [] + num_announced = 0 + + for tracker in trackers: # Start announce threads + tracker_stats = global_stats[tracker] + # Reduce the announce time for trackers that looks unreliable + if 
tracker_stats["num_error"] > 5 and tracker_stats["time_request"] > time.time() - 60 * min(30, tracker_stats["num_error"]): + if config.verbose: + self.site.log.debug("Tracker %s looks unreliable, announce skipped (error: %s)" % (tracker, tracker_stats["num_error"])) + continue + thread = gevent.spawn(self.announceTracker, tracker, mode=mode) + threads.append(thread) + thread.tracker = tracker + + time.sleep(0.01) + self.updateWebsocket(trackers="announcing") + + gevent.joinall(threads, timeout=20) # Wait for announce finish + + for thread in threads: + if thread.value is None: + continue + if thread.value is not False: + if thread.value > 1.0: # Takes more than 1 second to announce + slow.append("%.2fs %s" % (thread.value, thread.tracker)) + num_announced += 1 + else: + if thread.ready(): + errors.append(thread.tracker) + else: # Still running + slow.append("30s+ %s" % thread.tracker) + + # Save peers num + self.site.settings["peers"] = len(self.site.peers) + + if len(errors) < len(threads): # At least one tracker finished + if len(trackers) == 1: + announced_to = trackers[0] + else: + announced_to = "%s/%s trackers" % (num_announced, len(threads)) + if mode != "update" or config.verbose: + self.site.log.debug( + "Announced in mode %s to %s in %.3fs, errors: %s, slow: %s" % + (mode, announced_to, time.time() - s, errors, slow) + ) + else: + if len(threads) > 1: + self.site.log.error("Announce to %s trackers in %.3fs, failed" % (len(threads), time.time() - s)) + if len(threads) == 1 and mode != "start": # Move to next tracker + self.site.log.debug("Tracker failed, skipping to next one...") + gevent.spawn_later(1.0, self.announce, force=force, mode=mode, pex=pex) + + self.updateWebsocket(trackers="announced") + + if pex: + self.updateWebsocket(pex="announcing") + if mode == "more": # Need more peers + self.announcePex(need_num=10) + else: + self.announcePex() + + self.updateWebsocket(pex="announced") + + def getTrackerHandler(self, protocol): + if protocol == "udp": 
+ handler = self.announceTrackerUdp + elif protocol == "http": + handler = self.announceTrackerHttp + elif protocol == "https": + handler = self.announceTrackerHttps + else: + handler = None + return handler + + def getAddressParts(self, tracker): + if "://" not in tracker or not re.match("^[A-Za-z0-9:/\\.#-]+$", tracker): + return None + protocol, address = tracker.split("://", 1) + try: + ip, port = address.rsplit(":", 1) + except ValueError as err: + ip = address + port = 80 + if protocol.startswith("https"): + port = 443 + back = {} + back["protocol"] = protocol + back["address"] = address + back["ip"] = ip + back["port"] = port + return back + + def announceTracker(self, tracker, mode="start", num_want=10): + s = time.time() + address_parts = self.getAddressParts(tracker) + if not address_parts: + self.site.log.warning("Tracker %s error: Invalid address" % tracker.decode("utf8", "ignore")) + return False + + if tracker not in self.stats: + self.stats[tracker] = {"status": "", "num_request": 0, "num_success": 0, "num_error": 0, "time_request": 0, "time_last_error": 0} + + last_status = self.stats[tracker]["status"] + self.stats[tracker]["status"] = "announcing" + self.stats[tracker]["time_request"] = time.time() + global_stats[tracker]["time_request"] = time.time() + if config.verbose: + self.site.log.debug("Tracker announcing to %s (mode: %s)" % (tracker, mode)) + if mode == "update": + num_want = 10 + else: + num_want = 30 + + handler = self.getTrackerHandler(address_parts["protocol"]) + error = None + try: + if handler: + peers = handler(address_parts["address"], mode=mode, num_want=num_want) + else: + raise AnnounceError("Unknown protocol: %s" % address_parts["protocol"]) + except Exception, err: + self.site.log.warning("Tracker %s announce failed: %s in mode %s" % (tracker, str(err).decode("utf8", "ignore"), mode)) + error = err + + if error: + self.stats[tracker]["status"] = "error" + self.stats[tracker]["time_status"] = time.time() + 
self.stats[tracker]["last_error"] = str(err).decode("utf8", "ignore") + self.stats[tracker]["time_last_error"] = time.time() + self.stats[tracker]["num_error"] += 1 + self.stats[tracker]["num_request"] += 1 + global_stats[tracker]["num_request"] += 1 + global_stats[tracker]["num_error"] += 1 + self.updateWebsocket(tracker="error") + return False + + if peers is None: # Announce skipped + self.stats[tracker]["time_status"] = time.time() + self.stats[tracker]["status"] = last_status + return None + + self.stats[tracker]["status"] = "announced" + self.stats[tracker]["time_status"] = time.time() + self.stats[tracker]["num_success"] += 1 + self.stats[tracker]["num_request"] += 1 + global_stats[tracker]["num_request"] += 1 + global_stats[tracker]["num_error"] = 0 + + if peers is True: # Announce success, but no peers returned + return time.time() - s + + # Adding peers + added = 0 + for peer in peers: + if peer["port"] == 1: # Some trackers does not accept port 0, so we send port 1 as not-connectable + peer["port"] = 0 + if not peer["port"]: + continue # Dont add peers with port 0 + if self.site.addPeer(peer["addr"], peer["port"], source="tracker"): + added += 1 + + if added: + self.site.worker_manager.onPeers() + self.site.updateWebsocket(peers_added=added) + + if config.verbose: + self.site.log.debug( + "Tracker result: %s://%s (found %s peers, new: %s, total: %s)" % + (address_parts["protocol"], address_parts["address"], len(peers), added, len(self.site.peers)) + ) + return time.time() - s + + def announceTrackerUdp(self, tracker_address, mode="start", num_want=10): + s = time.time() + if config.disable_udp: + raise AnnounceError("Udp disabled by config") + if config.trackers_proxy != "disable": + raise AnnounceError("Udp trackers not available with proxies") + + ip, port = tracker_address.split("/")[0].split(":") + tracker = UdpTrackerClient(ip, int(port)) + if helper.getIpType(ip) in self.getOpenedServiceTypes(): + tracker.peer_port = self.fileserver_port + else: + 
tracker.peer_port = 0 + tracker.connect() + if not tracker.poll_once(): + raise AnnounceError("Could not connect") + tracker.announce(info_hash=hashlib.sha1(self.site.address).hexdigest(), num_want=num_want, left=431102370) + back = tracker.poll_once() + if not back: + raise AnnounceError("No response after %.0fs" % (time.time() - s)) + elif type(back) is dict and "response" in back: + peers = back["response"]["peers"] + else: + raise AnnounceError("Invalid response: %r" % back) + + return peers + + def httpRequest(self, url): + headers = { + 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11', + 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', + 'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3', + 'Accept-Encoding': 'none', + 'Accept-Language': 'en-US,en;q=0.8', + 'Connection': 'keep-alive' + } + + req = urllib2.Request(url, headers=headers) + + if config.trackers_proxy == "tor": + tor_manager = self.site.connection_server.tor_manager + handler = sockshandler.SocksiPyHandler(socks.SOCKS5, tor_manager.proxy_ip, tor_manager.proxy_port) + opener = urllib2.build_opener(handler) + return opener.open(req, timeout=50) + elif config.trackers_proxy == "disable": + return urllib2.urlopen(req, timeout=25) + else: + proxy_ip, proxy_port = config.trackers_proxy.split(":") + handler = sockshandler.SocksiPyHandler(socks.SOCKS5, proxy_ip, int(proxy_port)) + opener = urllib2.build_opener(handler) + return opener.open(req, timeout=50) + + def announceTrackerHttps(self, *args, **kwargs): + kwargs["protocol"] = "https" + return self.announceTrackerHttp(*args, **kwargs) + + def announceTrackerHttp(self, tracker_address, mode="start", num_want=10, protocol="http"): + tracker_ip, tracker_port = tracker_address.rsplit(":", 1) + if helper.getIpType(tracker_ip) in self.getOpenedServiceTypes(): + port = self.fileserver_port + else: + port = 1 + params = { + 'info_hash': 
hashlib.sha1(self.site.address).digest(), + 'peer_id': self.peer_id, 'port': port, + 'uploaded': 0, 'downloaded': 0, 'left': 431102370, 'compact': 1, 'numwant': num_want, + 'event': 'started' + } + + url = protocol + "://" + tracker_address + "?" + urllib.urlencode(params) + + s = time.time() + response = None + # Load url + if config.tor == "always" or config.trackers_proxy != "disable": + timeout = 60 + else: + timeout = 30 + + with gevent.Timeout(timeout, False): # Make sure of timeout + req = self.httpRequest(url) + response = req.read() + req.fp._sock.recv = None # Hacky avoidance of memory leak for older python versions + req.close() + req = None + + if not response: + raise AnnounceError("No response after %.0fs" % (time.time() - s)) + + # Decode peers + try: + peer_data = bencode.decode(response)["peers"] + response = None + peer_count = len(peer_data) / 6 + peers = [] + for peer_offset in xrange(peer_count): + off = 6 * peer_offset + peer = peer_data[off:off + 6] + addr, port = struct.unpack('!LH', peer) + peers.append({"addr": socket.inet_ntoa(struct.pack('!L', addr)), "port": port}) + except Exception as err: + raise AnnounceError("Invalid response: %r (%s)" % (response, err)) + + return peers + + @util.Noparallel(blocking=False) + def announcePex(self, query_num=2, need_num=5): + peers = self.site.getConnectedPeers() + if len(peers) == 0: # Wait 3s for connections + time.sleep(3) + peers = self.site.getConnectedPeers() + + if len(peers) == 0: # Small number of connected peers for this site, connect to any + peers = self.site.peers.values() + need_num = 10 + + random.shuffle(peers) + done = 0 + total_added = 0 + for peer in peers: + num_added = peer.pex(need_num=need_num) + if num_added is not False: + done += 1 + total_added += num_added + if num_added: + self.site.worker_manager.onPeers() + self.site.updateWebsocket(peers_added=num_added) + if done == query_num: + break + self.site.log.debug("Pex result: from %s peers got %s new peers." 
% (done, total_added)) + + def updateWebsocket(self, **kwargs): + if kwargs: + param = {"event": kwargs.items()[0]} + else: + param = None + + for ws in self.site.websockets: + ws.event("announcerChanged", self.site, param) diff --git a/src/Site/SiteManager.py b/src/Site/SiteManager.py index ee22aa1b8..04461cd7a 100644 --- a/src/Site/SiteManager.py +++ b/src/Site/SiteManager.py @@ -18,27 +18,40 @@ class SiteManager(object): def __init__(self): self.log = logging.getLogger("SiteManager") self.log.debug("SiteManager created.") - self.sites = None + self.sites = {} + self.sites_changed = int(time.time()) self.loaded = False gevent.spawn(self.saveTimer) - atexit.register(self.save) + atexit.register(lambda: self.save(recalculate_size=True)) # Load all sites from data/sites.json - def load(self, cleanup=True): + def load(self, cleanup=True, startup=False): self.log.debug("Loading sites...") self.loaded = False from Site import Site - if self.sites is None: - self.sites = {} address_found = [] added = 0 # Load new adresses for address, settings in json.load(open("%s/sites.json" % config.data_dir)).iteritems(): - if address not in self.sites and os.path.isfile("%s/%s/content.json" % (config.data_dir, address)): - s = time.time() - self.sites[address] = Site(address, settings=settings) - self.log.debug("Loaded site %s in %.3fs" % (address, time.time() - s)) - added += 1 + if address not in self.sites: + if os.path.isfile("%s/%s/content.json" % (config.data_dir, address)): + # Root content.json exists, try load site + s = time.time() + try: + site = Site(address, settings=settings) + site.content_manager.contents.get("content.json") + except Exception, err: + self.log.debug("Error loading site %s: %s" % (address, err)) + continue + self.sites[address] = site + self.log.debug("Loaded site %s in %.3fs" % (address, time.time() - s)) + added += 1 + elif startup: + # No site directory, start download + self.log.debug("Found new site in sites.json: %s" % address) + 
gevent.spawn(self.need, address, settings=settings) + added += 1 + address_found.append(address) # Remove deleted adresses @@ -49,16 +62,27 @@ def load(self, cleanup=True): self.log.debug("Removed site: %s" % address) # Remove orpan sites from contentdb - for row in ContentDb.getContentDb().execute("SELECT * FROM site"): - if row["address"] not in self.sites: - self.log.info("Deleting orphan site from content.db: %s" % row["address"]) - ContentDb.getContentDb().execute("DELETE FROM site WHERE ?", {"address": row["address"]}) + content_db = ContentDb.getContentDb() + for row in content_db.execute("SELECT * FROM site").fetchall(): + address = row["address"] + if address not in self.sites: + self.log.info("Deleting orphan site from content.db: %s" % address) + + try: + content_db.execute("DELETE FROM site WHERE ?", {"address": address}) + except Exception as err: + self.log.error("Can't delete site %s from content_db: %s" % (address, err)) + + if address in content_db.site_ids: + del content_db.site_ids[address] + if address in content_db.sites: + del content_db.sites[address] if added: self.log.debug("SiteManager added %s sites" % added) self.loaded = True - def save(self): + def save(self, recalculate_size=False): if not self.sites: self.log.debug("Save skipped: No sites found") return @@ -68,44 +92,52 @@ def save(self): s = time.time() data = {} # Generate data file + s = time.time() for address, site in self.list().iteritems(): - site.settings["size"] = site.content_manager.getTotalSize() # Update site size + if recalculate_size: + site.settings["size"], site.settings["size_optional"] = site.content_manager.getTotalSize() # Update site size data[address] = site.settings - data[address]["cache"] = {} - data[address]["cache"]["bad_files"] = site.bad_files - data[address]["cache"]["hashfield"] = site.content_manager.hashfield.tostring().encode("base64") + data[address]["cache"] = site.getSettingsCache() + time_generate = time.time() - s + s = time.time() if data: 
helper.atomicWrite("%s/sites.json" % config.data_dir, json.dumps(data, indent=2, sort_keys=True)) else: self.log.debug("Save error: No data") + time_write = time.time() - s + # Remove cache from site settings for address, site in self.list().iteritems(): site.settings["cache"] = {} - self.log.debug("Saved sites in %.2fs" % (time.time() - s)) + self.log.debug("Saved sites in %.2fs (generate: %.2fs, write: %.2fs)" % (time.time() - s, time_generate, time_write)) def saveTimer(self): while 1: time.sleep(60 * 10) - self.save() + self.save(recalculate_size=True) # Checks if its a valid address def isAddress(self, address): return re.match("^[A-Za-z0-9]{26,35}$", address) + def isDomain(self, address): + return False + # Return: Site object or None if not found def get(self, address): - if self.sites is None: # Not loaded yet - self.log.debug("Getting new site: %s)..." % address) + if not self.loaded: # Not loaded yet + self.log.debug("Loading site: %s)..." % address) self.load() return self.sites.get(address) # Return or create site and start download site files - def need(self, address, all_file=True): + def need(self, address, all_file=True, settings=None): from Site import Site site = self.get(address) if not site: # Site not exist yet + self.sites_changed = int(time.time()) # Try to find site with differect case for recover_address, recover_site in self.sites.items(): if recover_address.lower() == address.lower(): @@ -114,20 +146,19 @@ def need(self, address, all_file=True): if not self.isAddress(address): return False # Not address: %s % address self.log.debug("Added new site: %s" % address) - site = Site(address) + config.loadTrackersFile() + site = Site(address, settings=settings) self.sites[address] = site if not site.settings["serving"]: # Maybe it was deleted before site.settings["serving"] = True site.saveSettings() if all_file: # Also download user files on first sync site.download(check_size=True, blind_includes=True) - else: - if all_file: - site.download() 
return site def delete(self, address): + self.sites_changed = int(time.time()) self.log.debug("SiteManager deleted site: %s" % address) del(self.sites[address]) # Delete from sites.json @@ -135,12 +166,16 @@ def delete(self, address): # Lazy load sites def list(self): - if self.sites is None: # Not loaded yet + if not self.loaded: # Not loaded yet self.log.debug("Sites not loaded yet...") - self.load() + self.load(startup=True) return self.sites site_manager = SiteManager() # Singletone -peer_blacklist = [("127.0.0.1", config.fileserver_port)] # Dont add this peers +if config.action == "main": # Don't connect / add myself to peerlist + peer_blacklist = [("127.0.0.1", config.fileserver_port), ("::1", config.fileserver_port)] +else: + peer_blacklist = [] + diff --git a/src/Site/SiteStorage.py b/src/Site/SiteStorage.py index f9d7c1877..6c9d48c1b 100644 --- a/src/Site/SiteStorage.py +++ b/src/Site/SiteStorage.py @@ -4,23 +4,27 @@ import json import time import sys +from collections import defaultdict import sqlite3 import gevent.event +import util +from util import SafeRe from Db import Db from Debug import Debug from Config import config from util import helper from Plugin import PluginManager +from Translate import translate as _ @PluginManager.acceptPlugins class SiteStorage(object): def __init__(self, site, allow_create=True): self.site = site - self.directory = "%s/%s" % (config.data_dir, self.site.address) # Site data diretory - self.allowed_dir = os.path.abspath(self.directory.decode(sys.getfilesystemencoding())) # Only serve file within this dir + self.directory = u"%s/%s" % (config.data_dir, self.site.address) # Site data diretory + self.allowed_dir = os.path.abspath(self.directory) # Only serve file within this dir self.log = site.log self.db = None # Db class self.db_checked = False # Checked db tables since startup @@ -33,25 +37,18 @@ def __init__(self, site, allow_create=True): else: raise Exception("Directory not exists: %s" % self.directory) - # Load db 
from dbschema.json - def openDb(self, check=True): - try: + def getDbFile(self): + if self.isFile("dbschema.json"): schema = self.loadJson("dbschema.json") - db_path = self.getPath(schema["db_file"]) - except Exception, err: - raise Exception("dbschema.json is not a valid JSON: %s" % err) - - if check: - if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: # Not exist or null - self.rebuildDb() - - if not self.db: - self.db = Db(schema, db_path) + return schema["db_file"] + else: + return False - if check and not self.db_checked: - changed_tables = self.db.checkTables() - if changed_tables: - self.rebuildDb(delete_db=False) # TODO: only update the changed table datas + # Create new databaseobject with the site's schema + def openDb(self, close_idle=False): + schema = self.getDbSchema() + db_path = self.getPath(schema["db_file"]) + return Db(schema, db_path, close_idle=close_idle) def closeDb(self): if self.db: @@ -59,6 +56,13 @@ def closeDb(self): self.event_db_busy = None self.db = None + def getDbSchema(self): + try: + schema = self.loadJson("dbschema.json") + except Exception, err: + raise Exception("dbschema.json is not a valid JSON: %s" % err) + return schema + # Return db class def getDb(self): if not self.db: @@ -66,30 +70,51 @@ def getDb(self): self.site.needFile("dbschema.json", priority=3) self.has_db = self.isFile("dbschema.json") # Recheck if dbschema exist if self.has_db: - self.openDb() + schema = self.getDbSchema() + db_path = self.getPath(schema["db_file"]) + if not os.path.isfile(db_path) or os.path.getsize(db_path) == 0: + self.rebuildDb() + + if self.db: + self.db.close() + self.db = self.openDb(close_idle=True) + + changed_tables = self.db.checkTables() + if changed_tables: + self.rebuildDb(delete_db=False) # TODO: only update the changed table datas + return self.db + def updateDbFile(self, inner_path, file=None, cur=None): + path = self.getPath(inner_path) + return self.getDb().updateJson(path, file, cur) + # Return possible db 
files for the site def getDbFiles(self): + found = 0 for content_inner_path, content in self.site.content_manager.contents.iteritems(): # content.json file itself - if self.isFile(content_inner_path): # Missing content.json file - yield self.getPath(content_inner_path), self.open(content_inner_path) + if self.isFile(content_inner_path): + yield content_inner_path, self.getPath(content_inner_path) else: self.log.error("[MISSING] %s" % content_inner_path) # Data files in content.json content_inner_path_dir = helper.getDirname(content_inner_path) # Content.json dir relative to site - for file_relative_path in content["files"].keys(): - if not file_relative_path.endswith(".json"): + for file_relative_path in content.get("files", {}).keys() + content.get("files_optional", {}).keys(): + if not file_relative_path.endswith(".json") and not file_relative_path.endswith("json.gz"): continue # We only interesed in json files file_inner_path = content_inner_path_dir + file_relative_path # File Relative to site dir file_inner_path = file_inner_path.strip("/") # Strip leading / if self.isFile(file_inner_path): - yield self.getPath(file_inner_path), self.open(file_inner_path) + yield file_inner_path, self.getPath(file_inner_path) else: self.log.error("[MISSING] %s" % file_inner_path) + found += 1 + if found % 100 == 0: + time.sleep(0.000001) # Context switch to avoid UI block # Rebuild sql cache + @util.Noparallel() def rebuildDb(self, delete_db=True): self.has_db = self.isFile("dbschema.json") if not self.has_db: @@ -106,32 +131,51 @@ def rebuildDb(self, delete_db=True): os.unlink(db_path) except Exception, err: self.log.error("Delete error: %s" % err) - self.db = None - self.openDb(check=False) + + db = self.openDb() self.log.info("Creating tables...") - self.db.checkTables() - self.log.info("Importing data...") - cur = self.db.getCursor() + db.checkTables() + cur = db.getCursor() cur.execute("BEGIN") cur.logging = False found = 0 s = time.time() + self.log.info("Getting db 
files...") + db_files = list(self.getDbFiles()) + self.log.info("Importing data...") try: - for file_inner_path, file in self.getDbFiles(): + if len(db_files) > 100: + self.site.messageWebsocket(_["Database rebuilding...
    Imported {0} of {1} files..."].format("0000", len(db_files)), "rebuild", 0) + for file_inner_path, file_path in db_files: try: - if self.db.loadJson(file_inner_path, file=file, cur=cur): + if self.updateDbFile(file_inner_path, file=open(file_path, "rb"), cur=cur): found += 1 except Exception, err: self.log.error("Error importing %s: %s" % (file_inner_path, Debug.formatException(err))) + if found and found % 100 == 0: + self.site.messageWebsocket( + _["Database rebuilding...
    Imported {0} of {1} files..."].format(found, len(db_files)), + "rebuild", + int(float(found) / len(db_files) * 100) + ) + time.sleep(0.000001) # Context switch to avoid UI block finally: cur.execute("END") + cur.close() + db.close() + self.log.info("Closing Db: %s" % db) + if len(db_files) > 100: + self.site.messageWebsocket(_["Database rebuilding...
    Imported {0} of {1} files..."].format(found, len(db_files)), "rebuild", 100) self.log.info("Imported %s data file in %ss" % (found, time.time() - s)) self.event_db_busy.set(True) # Event done, notify waiters self.event_db_busy = None # Clear event # Execute sql query or rebuild on dberror def query(self, query, params=None): + if not query.strip().upper().startswith("SELECT"): + raise Exception("Only SELECT query supported") + if self.event_db_busy: # Db not ready for queries self.log.debug("Wating for db...") self.event_db_busy.get() # Wait for event @@ -147,8 +191,13 @@ def query(self, query, params=None): return res # Open file object - def open(self, inner_path, mode="rb"): - return open(self.getPath(inner_path), mode) + def open(self, inner_path, mode="rb", create_dirs=False): + file_path = self.getPath(inner_path) + if create_dirs: + file_dir = os.path.dirname(file_path) + if not os.path.isdir(file_dir): + os.makedirs(file_dir) + return open(file_path, mode) # Open file object def read(self, inner_path, mode="r"): @@ -166,8 +215,11 @@ def write(self, inner_path, content): with open(file_path, "wb") as file: shutil.copyfileobj(content, file) # Write buff to disk else: # Simple string - with open(file_path, "wb") as file: - file.write(content) + if inner_path == "content.json" and os.path.isfile(file_path): + helper.atomicWrite(file_path, content) + else: + with open(file_path, "wb") as file: + file.write(content) del content self.onUpdated(inner_path) @@ -195,32 +247,56 @@ def rename(self, inner_path_before, inner_path_after): raise err # List files from a directory - def list(self, dir_inner_path): + def walk(self, dir_inner_path, ignore=None): directory = self.getPath(dir_inner_path) for root, dirs, files in os.walk(directory): root = root.replace("\\", "/") root_relative_path = re.sub("^%s" % re.escape(directory), "", root).lstrip("/") for file_name in files: if root_relative_path: # Not root dir - yield root_relative_path + "/" + file_name + 
file_relative_path = root_relative_path + "/" + file_name else: - yield file_name + file_relative_path = file_name + + if ignore and SafeRe.match(ignore, file_relative_path): + continue + + yield file_relative_path + + # Don't scan directory that is in the ignore pattern + if ignore: + dirs_filtered = [] + for dir_name in dirs: + if root_relative_path: + dir_relative_path = root_relative_path + "/" + dir_name + else: + dir_relative_path = dir_name + + if ignore == ".*" or re.match(".*([|(]|^)%s([|)]|$)" % re.escape(dir_relative_path + "/.*"), ignore): + continue + + dirs_filtered.append(dir_name) + dirs[:] = dirs_filtered + + # list directories in a directory + def list(self, dir_inner_path): + directory = self.getPath(dir_inner_path) + return os.listdir(directory) # Site content updated def onUpdated(self, inner_path, file=None): - file_path = self.getPath(inner_path) # Update Sql cache if inner_path == "dbschema.json": self.has_db = self.isFile("dbschema.json") # Reopen DB to check changes if self.has_db: self.closeDb() - self.openDb() - elif not config.disable_db and inner_path.endswith(".json") and self.has_db: # Load json file to db + self.getDb() + elif not config.disable_db and (inner_path.endswith(".json") or inner_path.endswith(".json.gz")) and self.has_db: # Load json file to db if config.verbose: - self.log.debug("Loading json file to db: %s" % inner_path) + self.log.debug("Loading json file to db: %s (file: %s)" % (inner_path, file)) try: - self.getDb().loadJson(file_path, file) + self.updateDbFile(inner_path, file) except Exception, err: self.log.error("Json %s load error: %s" % (inner_path, Debug.formatException(err))) self.closeDb() @@ -230,8 +306,7 @@ def loadJson(self, inner_path): with self.open(inner_path) as file: return json.load(file) - # Write formatted json file - def writeJson(self, inner_path, data): + def formatJson(self, data): content = json.dumps(data, indent=1, sort_keys=True) # Make it a little more compact by removing unnecessary 
white space @@ -243,11 +318,23 @@ def compact_dict(match): content = re.sub("\{(\n[^,\[\{]{10,100}?)\}[, ]{0,2}\n", compact_dict, content, flags=re.DOTALL) + def compact_list(match): + if "\n" in match.group(0): + stripped_lines = re.sub("\n[ ]*", "", match.group(1)) + return match.group(0).replace(match.group(1), stripped_lines) + else: + return match.group(0) + + content = re.sub("\[([^\[\{]{2,300}?)\][, ]{0,2}\n", compact_list, content, flags=re.DOTALL) + # Remove end of line whitespace content = re.sub("(?m)[ ]+$", "", content) + return content + # Write formatted json file + def writeJson(self, inner_path, data): # Write to disk - self.write(inner_path, content) + self.write(inner_path, self.formatJson(data)) # Get file size def getSize(self, inner_path): @@ -275,53 +362,68 @@ def getPath(self, inner_path): if not inner_path: return self.directory - file_path = u"%s/%s" % (self.directory, inner_path) + if "../" in inner_path: + raise Exception(u"File not allowed: %s" % inner_path) - if ".." 
in file_path: - raise Exception(u"File not allowed: %s" % file_path) - return file_path + return u"%s/%s" % (self.directory, inner_path) # Get site dir relative path def getInnerPath(self, path): if path == self.directory: inner_path = "" else: - inner_path = re.sub("^%s/" % re.escape(self.directory), "", path) + if path.startswith(self.directory): + inner_path = path[len(self.directory) + 1:] + else: + raise Exception(u"File not allowed: %s" % path) return inner_path # Verify all files sha512sum using content.json def verifyFiles(self, quick_check=False, add_optional=False, add_changed=True): bad_files = [] + back = defaultdict(int) + back["bad_files"] = bad_files i = 0 + self.log.debug("Verifing files...") if not self.site.content_manager.contents.get("content.json"): # No content.json, download it first self.log.debug("VerifyFile content.json not exists") self.site.needFile("content.json", update=True) # Force update to fix corrupt file self.site.content_manager.loadContent() # Reload content.json for content_inner_path, content in self.site.content_manager.contents.items(): + back["num_content"] += 1 i += 1 if i % 50 == 0: time.sleep(0.0001) # Context switch to avoid gevent hangs if not os.path.isfile(self.getPath(content_inner_path)): # Missing content.json file + back["num_content_missing"] += 1 self.log.debug("[MISSING] %s" % content_inner_path) bad_files.append(content_inner_path) for file_relative_path in content.get("files", {}).keys(): + back["num_file"] += 1 file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir file_inner_path = file_inner_path.strip("/") # Strip leading / file_path = self.getPath(file_inner_path) if not os.path.isfile(file_path): + back["num_file_missing"] += 1 self.log.debug("[MISSING] %s" % file_inner_path) bad_files.append(file_inner_path) continue if quick_check: ok = os.path.getsize(file_path) == content["files"][file_relative_path]["size"] + if not ok: + err = "Invalid size" else: - 
ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) + try: + ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) + except Exception, err: + ok = False if not ok: - self.log.debug("[CHANGED] %s" % file_inner_path) + back["num_file_invalid"] += 1 + self.log.debug("[INVALID] %s: %s" % (file_inner_path, err)) if add_changed or content.get("cert_user_id"): # If updating own site only add changed user files bad_files.append(file_inner_path) @@ -329,13 +431,17 @@ def verifyFiles(self, quick_check=False, add_optional=False, add_changed=True): optional_added = 0 optional_removed = 0 for file_relative_path in content.get("files_optional", {}).keys(): + back["num_optional"] += 1 file_node = content["files_optional"][file_relative_path] file_inner_path = helper.getDirname(content_inner_path) + file_relative_path # Relative to site dir file_inner_path = file_inner_path.strip("/") # Strip leading / file_path = self.getPath(file_inner_path) + hash_id = self.site.content_manager.hashfield.getHashId(file_node["sha512"]) if not os.path.isfile(file_path): - if self.site.content_manager.hashfield.hasHash(file_node["sha512"]): - self.site.content_manager.optionalRemove(file_inner_path, file_node["sha512"], file_node["size"]) + if self.site.content_manager.isDownloaded(file_inner_path, hash_id): + back["num_optional_removed"] += 1 + self.log.debug("[OPTIONAL REMOVED] %s" % file_inner_path) + self.site.content_manager.optionalRemoved(file_inner_path, hash_id, file_node["size"]) if add_optional: bad_files.append(file_inner_path) continue @@ -343,15 +449,21 @@ def verifyFiles(self, quick_check=False, add_optional=False, add_changed=True): if quick_check: ok = os.path.getsize(file_path) == content["files_optional"][file_relative_path]["size"] else: - ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) + try: + ok = self.site.content_manager.verifyFile(file_inner_path, open(file_path, "rb")) + except 
Exception, err: + ok = False if ok: - if not self.site.content_manager.hashfield.hasHash(file_node["sha512"]): - self.site.content_manager.optionalDownloaded(file_inner_path, file_node["sha512"], file_node["size"]) + if not self.site.content_manager.isDownloaded(file_inner_path, hash_id): + back["num_optional_added"] += 1 + self.site.content_manager.optionalDownloaded(file_inner_path, hash_id, file_node["size"]) optional_added += 1 + self.log.debug("[OPTIONAL FOUND] %s" % file_inner_path) else: - if self.site.content_manager.hashfield.hasHash(file_node["sha512"]): - self.site.content_manager.optionalRemove(file_inner_path, file_node["sha512"], file_node["size"]) + if self.site.content_manager.isDownloaded(file_inner_path, hash_id): + back["num_optional_removed"] += 1 + self.site.content_manager.optionalRemoved(file_inner_path, hash_id, file_node["size"]) optional_removed += 1 bad_files.append(file_inner_path) self.log.debug("[OPTIONAL CHANGED] %s" % file_inner_path) @@ -362,17 +474,19 @@ def verifyFiles(self, quick_check=False, add_optional=False, add_changed=True): (content_inner_path, len(content["files"]), quick_check, optional_added, optional_removed) ) + self.site.content_manager.contents.db.processDelayed() time.sleep(0.0001) # Context switch to avoid gevent hangs - return bad_files + return back # Check and try to fix site files integrity def updateBadFiles(self, quick_check=True): s = time.time() - bad_files = self.verifyFiles( + res = self.verifyFiles( quick_check, add_optional=self.site.isDownloadable(""), add_changed=not self.site.settings.get("own") # Don't overwrite changed files if site owned ) + bad_files = res["bad_files"] self.site.bad_files = {} if bad_files: for bad_file in bad_files: @@ -384,7 +498,7 @@ def deleteFiles(self): self.log.debug("Deleting files from content.json...") files = [] # Get filenames for content_inner_path in self.site.content_manager.contents.keys(): - content = self.site.content_manager.contents[content_inner_path] + 
content = self.site.content_manager.contents.get(content_inner_path, {}) files.append(content_inner_path) # Add normal files for file_relative_path in content.get("files", {}).keys(): @@ -415,8 +529,8 @@ def deleteFiles(self): os.unlink(path) break except Exception, err: - self.log.error("Error removing %s: %s, try #%s" % (path, err, retry)) - time.sleep(float(retry)/10) + self.log.error(u"Error removing %s: %s, try #%s" % (inner_path, err, retry)) + time.sleep(float(retry) / 10) self.onUpdated(inner_path, False) self.log.debug("Deleting empty dirs...") @@ -424,10 +538,10 @@ def deleteFiles(self): for dir in dirs: path = os.path.join(root, dir) if os.path.isdir(path) and os.listdir(path) == []: - os.removedirs(path) + os.rmdir(path) self.log.debug("Removing %s" % path) if os.path.isdir(self.directory) and os.listdir(self.directory) == []: - os.removedirs(self.directory) # Remove sites directory if empty + os.rmdir(self.directory) # Remove sites directory if empty if os.path.isdir(self.directory): self.log.debug("Some unknown file remained in site data dir: %s..." 
% self.directory) diff --git a/src/Site/__init__.py b/src/Site/__init__.py index cc830ae86..07a21d405 100644 --- a/src/Site/__init__.py +++ b/src/Site/__init__.py @@ -1,2 +1,3 @@ from Site import Site -from SiteStorage import SiteStorage \ No newline at end of file +from SiteStorage import SiteStorage +from SiteAnnouncer import SiteAnnouncer diff --git a/src/Test/Spy.py b/src/Test/Spy.py index c017dea9f..8d8f18000 100644 --- a/src/Test/Spy.py +++ b/src/Test/Spy.py @@ -1,17 +1,20 @@ class Spy: - def __init__(self, obj, func_name): - self.obj = obj - self.func_name = func_name - self.func_original = getattr(self.obj, func_name) - self.calls = [] + def __init__(self, obj, func_name): + self.obj = obj + self.func_name = func_name + self.func_original = getattr(self.obj, func_name) + self.calls = [] - def __enter__(self, *args, **kwargs): - def loggedFunc(cls, *args, **kwags): - print "Logging", self, args, kwargs - self.calls.append(args) - return self.func_original(cls, *args, **kwargs) - setattr(self.obj, self.func_name, loggedFunc) - return self.calls + def __enter__(self, *args, **kwargs): + def loggedFunc(cls, *args, **kwargs): + call = dict(enumerate(args, 1)) + call[0] = cls + call.update(kwargs) + print "Logging", call + self.calls.append(call) + return self.func_original(cls, *args, **kwargs) + setattr(self.obj, self.func_name, loggedFunc) + return self.calls - def __exit__(self, *args, **kwargs): - setattr(self.obj, self.func_name, self.func_original) \ No newline at end of file + def __exit__(self, *args, **kwargs): + setattr(self.obj, self.func_name, self.func_original) \ No newline at end of file diff --git a/src/Test/TestConnectionServer.py b/src/Test/TestConnectionServer.py index 7492925fc..b17d3f761 100644 --- a/src/Test/TestConnectionServer.py +++ b/src/Test/TestConnectionServer.py @@ -1,43 +1,65 @@ import time +import socket import gevent import pytest +import mock from Crypt import CryptConnection from Connection import ConnectionServer +from Config 
import config @pytest.mark.usefixtures("resetSettings") class TestConnection: + def testIpv6(self, file_server6): + assert ":" in file_server6.ip + + client = ConnectionServer(file_server6.ip, 1545) + connection = client.getConnection(file_server6.ip, 1544) + + assert connection.ping() + + # Close connection + connection.close() + client.stop() + time.sleep(0.01) + assert len(file_server6.connections) == 0 + + # Should not able to reach on ipv4 ip + with pytest.raises(socket.error) as err: + client = ConnectionServer("127.0.0.1", 1545) + connection = client.getConnection("127.0.0.1", 1544) + def testSslConnection(self, file_server): - file_server.ip_incoming = {} # Reset flood protection - client = ConnectionServer("127.0.0.1", 1545) + client = ConnectionServer(file_server.ip, 1545) assert file_server != client # Connect to myself - connection = client.getConnection("127.0.0.1", 1544) + with mock.patch('Config.config.ip_local', return_value=[]): # SSL not used for local ips + connection = client.getConnection(file_server.ip, 1544) + assert len(file_server.connections) == 1 - assert len(file_server.ips) == 1 assert connection.handshake assert connection.crypt + # Close connection connection.close() client.stop() time.sleep(0.01) assert len(file_server.connections) == 0 - assert len(file_server.ips) == 0 def testRawConnection(self, file_server): - file_server.ip_incoming = {} # Reset flood protection - client = ConnectionServer("127.0.0.1", 1545) + client = ConnectionServer(file_server.ip, 1545) assert file_server != client # Remove all supported crypto crypt_supported_bk = CryptConnection.manager.crypt_supported CryptConnection.manager.crypt_supported = [] - connection = client.getConnection("127.0.0.1", 1544) + with mock.patch('Config.config.ip_local', return_value=[]): # SSL not used for local ips + connection = client.getConnection(file_server.ip, 1544) assert len(file_server.connections) == 1 assert not connection.crypt @@ -51,9 +73,8 @@ def 
testRawConnection(self, file_server): CryptConnection.manager.crypt_supported = crypt_supported_bk def testPing(self, file_server, site): - file_server.ip_incoming = {} # Reset flood protection - client = ConnectionServer("127.0.0.1", 1545) - connection = client.getConnection("127.0.0.1", 1544) + client = ConnectionServer(file_server.ip, 1545) + connection = client.getConnection(file_server.ip, 1544) assert connection.ping() @@ -61,38 +82,36 @@ def testPing(self, file_server, site): client.stop() def testGetConnection(self, file_server): - file_server.ip_incoming = {} # Reset flood protection - client = ConnectionServer("127.0.0.1", 1545) - connection = client.getConnection("127.0.0.1", 1544) + client = ConnectionServer(file_server.ip, 1545) + connection = client.getConnection(file_server.ip, 1544) # Get connection by ip/port - connection2 = client.getConnection("127.0.0.1", 1544) + connection2 = client.getConnection(file_server.ip, 1544) assert connection == connection2 # Get connection by peerid - assert not client.getConnection("127.0.0.1", 1544, peer_id="notexists", create=False) - connection2 = client.getConnection("127.0.0.1", 1544, peer_id=connection.handshake["peer_id"], create=False) + assert not client.getConnection(file_server.ip, 1544, peer_id="notexists", create=False) + connection2 = client.getConnection(file_server.ip, 1544, peer_id=connection.handshake["peer_id"], create=False) assert connection2 == connection connection.close() client.stop() def testFloodProtection(self, file_server): - file_server.ip_incoming = {} # Reset flood protection whitelist = file_server.whitelist # Save for reset file_server.whitelist = [] # Disable 127.0.0.1 whitelist - client = ConnectionServer("127.0.0.1", 1545) + client = ConnectionServer(file_server.ip, 1545) # Only allow 6 connection in 1 minute for reconnect in range(6): - connection = client.getConnection("127.0.0.1", 1544) + connection = client.getConnection(file_server.ip, 1544) assert connection.handshake 
connection.close() # The 7. one will timeout with pytest.raises(gevent.Timeout): with gevent.Timeout(0.1): - connection = client.getConnection("127.0.0.1", 1544) + connection = client.getConnection(file_server.ip, 1544) # Reset whitelist file_server.whitelist = whitelist diff --git a/src/Test/TestContent.py b/src/Test/TestContent.py index df62aec47..e4afb91e8 100644 --- a/src/Test/TestContent.py +++ b/src/Test/TestContent.py @@ -5,10 +5,14 @@ import pytest from Crypt import CryptBitcoin +from Content.ContentManager import VerifyError, SignError +from util.SafeRe import UnsafePatternError @pytest.mark.usefixtures("resetSettings") class TestContent: + privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" + def testInclude(self, site): # Rules defined in parent content.json rules = site.content_manager.getRules("data/test_include/content.json") @@ -35,7 +39,6 @@ def testInclude(self, site): assert site.content_manager.getValidSigners("content.json") == ["1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] def testInlcudeLimits(self, site): - privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # Data validation data_dict = { "files": { @@ -48,7 +51,7 @@ def testInlcudeLimits(self, site): } # Normal data - data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey)} + data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)} data = StringIO(json.dumps(data_dict)) assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) # Reset @@ -56,34 +59,42 @@ def testInlcudeLimits(self, site): # Too large data_dict["files"]["data.json"]["size"] = 200000 # Emulate 2MB sized data.json - data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey)} + data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)} data = 
StringIO(json.dumps(data_dict)) - assert not site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) + with pytest.raises(VerifyError) as err: + site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) + assert "Include too large" in str(err) + # Reset data_dict["files"]["data.json"]["size"] = 505 del data_dict["signs"] # Not allowed file data_dict["files"]["notallowed.exe"] = data_dict["files"]["data.json"] - data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey)} + data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)} data = StringIO(json.dumps(data_dict)) - assert not site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) + with pytest.raises(VerifyError) as err: + site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) + assert "File not allowed" in str(err) + # Reset del data_dict["files"]["notallowed.exe"] del data_dict["signs"] # Should work again - data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), privatekey)} + data_dict["signs"] = {"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict), self.privatekey)} data = StringIO(json.dumps(data_dict)) assert site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) @pytest.mark.parametrize("inner_path", ["content.json", "data/test_include/content.json", "data/users/content.json"]) def testSign(self, site, inner_path): # Bad privatekey - assert not site.content_manager.sign(inner_path, privatekey="5aaa3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMnaa", filewrite=False) + with pytest.raises(SignError) as err: + site.content_manager.sign(inner_path, privatekey="5aaa3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMnaa", filewrite=False) + assert "Private key 
invalid" in str(err) # Good privatekey - content = site.content_manager.sign(inner_path, privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", filewrite=False) + content = site.content_manager.sign(inner_path, privatekey=self.privatekey, filewrite=False) content_old = site.content_manager.contents[inner_path] # Content before the sign assert not content_old == content # Timestamp changed assert site.address in content["signs"] # Used the site's private key to sign @@ -108,10 +119,10 @@ def testSignOptionalFiles(self, site): assert len(site.content_manager.hashfield) == 0 site.content_manager.contents["content.json"]["optional"] = "((data/img/zero.*))" - content_optional = site.content_manager.sign(privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", filewrite=False) + content_optional = site.content_manager.sign(privatekey=self.privatekey, filewrite=False, remove_missing_optional=True) del site.content_manager.contents["content.json"]["optional"] - content_nooptional = site.content_manager.sign(privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv", filewrite=False) + content_nooptional = site.content_manager.sign(privatekey=self.privatekey, filewrite=False, remove_missing_optional=True) assert len(content_nooptional.get("files_optional", {})) == 0 # No optional files if no pattern assert len(content_optional["files_optional"]) > 0 @@ -120,6 +131,9 @@ def testSignOptionalFiles(self, site): def testFileInfo(self, site): assert "sha512" in site.content_manager.getFileInfo("index.html") + assert site.content_manager.getFileInfo("data/img/domain.png")["content_inner_path"] == "content.json" + assert site.content_manager.getFileInfo("data/users/hello.png")["content_inner_path"] == "data/users/content.json" + assert site.content_manager.getFileInfo("data/users/content.json")["content_inner_path"] == "data/users/content.json" assert not site.content_manager.getFileInfo("notexist") # Optional file @@ -136,14 +150,13 @@ def 
testFileInfo(self, site): assert file_info_optional["optional"] is True def testVerify(self, site): - privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" inner_path = "data/test_include/content.json" data_dict = site.storage.loadJson(inner_path) data = StringIO(json.dumps(data_dict)) # Re-sign data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) } assert site.content_manager.verifyFile(inner_path, data, ignore_same=False) @@ -151,27 +164,82 @@ def testVerify(self, site): data_dict["address"] = "Othersite" del data_dict["signs"] data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) } data = StringIO(json.dumps(data_dict)) - assert not site.content_manager.verifyFile(inner_path, data, ignore_same=False) + with pytest.raises(VerifyError) as err: + site.content_manager.verifyFile(inner_path, data, ignore_same=False) + assert "Wrong site address" in str(err) # Wrong inner_path data_dict["address"] = "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" data_dict["inner_path"] = "content.json" del data_dict["signs"] data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) } data = StringIO(json.dumps(data_dict)) - assert not site.content_manager.verifyFile(inner_path, data, ignore_same=False) + with pytest.raises(VerifyError) as err: + site.content_manager.verifyFile(inner_path, data, ignore_same=False) + assert "Wrong inner_path" in str(err) # Everything right again data_dict["address"] = 
"1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT" data_dict["inner_path"] = inner_path del data_dict["signs"] data_dict["signs"] = { - "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) } data = StringIO(json.dumps(data_dict)) assert site.content_manager.verifyFile(inner_path, data, ignore_same=False) + + def testVerifyInnerPath(self, site): + inner_path = "content.json" + data_dict = site.storage.loadJson(inner_path) + + for good_relative_path in ["data.json", "out/data.json", "Any File [by none] (1).jpg"]: + data_dict["files"] = {good_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}} + + if "sign" in data_dict: + del data_dict["sign"] + del data_dict["signs"] + data_dict["signs"] = { + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) + } + data = StringIO(json.dumps(data_dict)) + assert site.content_manager.verifyFile(inner_path, data, ignore_same=False) + + for bad_relative_path in ["../data.json", "data/" * 100, "invalid|file.jpg"]: + data_dict["files"] = {bad_relative_path: {"sha512": "369d4e780cc80504285f13774ca327fe725eed2d813aad229e62356b07365906", "size": 505}} + + if "sign" in data_dict: + del data_dict["sign"] + del data_dict["signs"] + data_dict["signs"] = { + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), self.privatekey) + } + data = StringIO(json.dumps(data_dict)) + with pytest.raises(VerifyError) as err: + site.content_manager.verifyFile(inner_path, data, ignore_same=False) + assert "Invalid relative path" in str(err) + + @pytest.mark.parametrize("key", ["ignore", "optional"]) + def testSignUnsafePattern(self, site, key): + site.content_manager.contents["content.json"][key] = "([a-zA-Z]+)*" + with pytest.raises(UnsafePatternError) as 
err: + site.content_manager.sign("content.json", privatekey=self.privatekey, filewrite=False) + assert "Potentially unsafe" in str(err) + + + def testVerifyUnsafePattern(self, site): + site.content_manager.contents["content.json"]["includes"]["data/test_include/content.json"]["files_allowed"] = "([a-zA-Z]+)*" + with pytest.raises(UnsafePatternError) as err: + with site.storage.open("data/test_include/content.json") as data: + site.content_manager.verifyFile("data/test_include/content.json", data, ignore_same=False) + assert "Potentially unsafe" in str(err) + + site.content_manager.contents["data/users/content.json"]["user_contents"]["permission_rules"]["([a-zA-Z]+)*"] = {"max_size": 0} + with pytest.raises(UnsafePatternError) as err: + with site.storage.open("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json") as data: + site.content_manager.verifyFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", data, ignore_same=False) + assert "Potentially unsafe" in str(err) diff --git a/src/Test/TestContentUser.py b/src/Test/TestContentUser.py index f51f6bb4b..46d8bdeff 100644 --- a/src/Test/TestContentUser.py +++ b/src/Test/TestContentUser.py @@ -4,14 +4,17 @@ import pytest from Crypt import CryptBitcoin +from Content.ContentManager import VerifyError, SignError @pytest.mark.usefixtures("resetSettings") -class TestUserContent: +class TestContentUser: def testSigners(self, site): # File info for not existing user file file_info = site.content_manager.getFileInfo("data/users/notexist/data.json") assert file_info["content_inner_path"] == "data/users/notexist/content.json" + file_info = site.content_manager.getFileInfo("data/users/notexist/a/b/data.json") + assert file_info["content_inner_path"] == "data/users/notexist/content.json" valid_signers = site.content_manager.getValidSigners("data/users/notexist/content.json") assert valid_signers == ["14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet", "notexist", "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT"] @@ -29,8 +32,7 @@ def 
testSigners(self, site): valid_signers = site.content_manager.getValidSigners("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content) assert '1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT' in valid_signers # The site address assert '14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet' in valid_signers # Admin user defined in data/users/content.json - assert not '1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C' in valid_signers # The user itself - + assert '1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C' not in valid_signers # The user itself def testRules(self, site): # We going to manipulate it this test rules based on data/users/content.json @@ -63,6 +65,55 @@ def testRules(self, site): rules = site.content_manager.getRules("data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_content) assert "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" not in rules["signers"] + def testRulesAddress(self, site): + user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json" + user_content = site.storage.loadJson(user_inner_path) + + rules = site.content_manager.getRules(user_inner_path, user_content) + assert rules["max_size"] == 10000 + assert "1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9" in rules["signers"] + + users_content = site.content_manager.contents["data/users/content.json"] + + # Ban user based on address + users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = False + rules = site.content_manager.getRules(user_inner_path, user_content) + assert "1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9" not in rules["signers"] + + # Change max allowed size + users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = {"max_size": 20000} + rules = site.content_manager.getRules(user_inner_path, user_content) + assert rules["max_size"] == 20000 + + def testVerifyAddress(self, site): + privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT + user_inner_path = 
"data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json" + data_dict = site.storage.loadJson(user_inner_path) + users_content = site.content_manager.contents["data/users/content.json"] + + data = StringIO(json.dumps(data_dict)) + assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) + + # Test error on 15k data.json + data_dict["files"]["data.json"]["size"] = 1024 * 15 + del data_dict["signs"] # Remove signs before signing + data_dict["signs"] = { + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) + } + data = StringIO(json.dumps(data_dict)) + with pytest.raises(VerifyError) as err: + site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) + assert "Include too large" in str(err) + + # Give more space based on address + users_content["user_contents"]["permissions"]["1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9"] = {"max_size": 20000} + del data_dict["signs"] # Remove signs before signing + data_dict["signs"] = { + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) + } + data = StringIO(json.dumps(data_dict)) + assert site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) + def testVerify(self, site): privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT user_inner_path = "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/content.json" @@ -81,7 +132,10 @@ def testVerify(self, site): rules = site.content_manager.getRules(user_inner_path, data_dict) assert rules["max_size"] == 0 data = StringIO(json.dumps(data_dict)) - assert not site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) + + with pytest.raises(VerifyError) as err: + site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) + assert "Include too large" in str(err) users_content["user_contents"]["permission_rules"][".*"]["max_size"] = 10000 # Reset # Test 
max optional size exception @@ -101,7 +155,9 @@ def testVerify(self, site): "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } data = StringIO(json.dumps(data_dict)) - assert not site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) + with pytest.raises(VerifyError) as err: + site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) + assert "Include optional files too large" in str(err) data_dict["files_optional"]["peanut-butter-jelly-time.gif"]["size"] = 1024 * 1024 # Reset # hello.exe = Not allowed @@ -111,18 +167,21 @@ def testVerify(self, site): "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } data = StringIO(json.dumps(data_dict)) - assert not site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) + with pytest.raises(VerifyError) as err: + site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) + assert "Optional file not allowed" in str(err) del data_dict["files_optional"]["hello.exe"] # Reset # Includes not allowed in user content - data_dict["includes"] = { "other.json": { } } + data_dict["includes"] = {"other.json": {}} del data_dict["signs"] # Remove signs before signing data_dict["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(data_dict, sort_keys=True), privatekey) } data = StringIO(json.dumps(data_dict)) - assert not site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) - + with pytest.raises(VerifyError) as err: + site.content_manager.verifyFile(user_inner_path, data, ignore_same=False) + assert "Includes not allowed" in str(err) def testCert(self, site): # user_addr = "1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C" @@ -174,10 +233,13 @@ def testCert(self, site): # Test banned user cert_user_id = user_content["cert_user_id"] # My username 
site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] = False - assert not site.content_manager.verifyFile( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - StringIO(json.dumps(signed_content)), ignore_same=False - ) + with pytest.raises(VerifyError) as err: + site.content_manager.verifyFile( + "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", + StringIO(json.dumps(signed_content)), ignore_same=False + ) + assert "Valid signs: 0/1" in str(err) + del site.content_manager.contents["data/users/content.json"]["user_contents"]["permissions"][cert_user_id] # Reset # Test invalid cert user_content["cert_sign"] = CryptBitcoin.sign( @@ -186,10 +248,12 @@ def testCert(self, site): signed_content = site.content_manager.sign( "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False ) - assert not site.content_manager.verifyFile( - "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - StringIO(json.dumps(signed_content)), ignore_same=False - ) + with pytest.raises(VerifyError) as err: + site.content_manager.verifyFile( + "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", + StringIO(json.dumps(signed_content)), ignore_same=False + ) + assert "Invalid cert" in str(err) # Test banned user, signed by the site owner user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % ( @@ -236,15 +300,91 @@ def testMissingCert(self, site): StringIO(json.dumps(signed_content)), ignore_same=False ) + # Test invalid cert_user_id + user_content["cert_user_id"] = "nodomain" + user_content["signs"] = { + "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), user_priv) + } + signed_content = site.content_manager.sign( + "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False + ) + with pytest.raises(VerifyError) as err: + site.content_manager.verifyFile( + 
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", + StringIO(json.dumps(signed_content)), ignore_same=False + ) + assert "Invalid domain in cert_user_id" in str(err) + # Test removed cert - # user_content["cert_sign"] + del user_content["cert_user_id"] del user_content["cert_auth_type"] del user_content["signs"] # Remove signs before signing user_content["signs"] = { "1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT": CryptBitcoin.sign(json.dumps(user_content, sort_keys=True), user_priv) } - print "--- Signed content", user_content - assert not site.content_manager.verifyFile( + signed_content = site.content_manager.sign( + "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False + ) + with pytest.raises(VerifyError) as err: + site.content_manager.verifyFile( + "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", + StringIO(json.dumps(signed_content)), ignore_same=False + ) + assert "Missing cert_user_id" in str(err) + + + def testCertSignersPattern(self, site): + user_priv = "5Kk7FSA63FC2ViKmKLuBxk9gQkaQ5713hKq8LmFAf4cVeXh6K6A" + cert_priv = "5JusJDSjHaMHwUjDT3o6eQ54pA6poo8La5fAgn1wNc3iK59jxjA" # For 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet + + user_content = site.content_manager.contents["data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json"] + rules_content = site.content_manager.contents["data/users/content.json"] + + # Override valid cert signers for the test + rules_content["user_contents"]["cert_signers_pattern"] = "14wgQ[0-9][A-Z]" + + # Sign a valid cert + user_content["cert_user_id"] = "certuser@14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" + user_content["cert_sign"] = CryptBitcoin.sign("1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C#%s/%s" % ( + user_content["cert_auth_type"], + "certuser" + ), cert_priv) + signed_content = site.content_manager.sign( + "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", user_priv, filewrite=False + ) + + assert site.content_manager.verifyFile( 
"data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", - StringIO(json.dumps(user_content)), ignore_same=False - ) \ No newline at end of file + StringIO(json.dumps(signed_content)), ignore_same=False + ) + + # Cert does not matches the pattern + rules_content["user_contents"]["cert_signers_pattern"] = "14wgX[0-9][A-Z]" + + with pytest.raises(VerifyError) as err: + site.content_manager.verifyFile( + "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", + StringIO(json.dumps(signed_content)), ignore_same=False + ) + assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err) + + # Removed cert_signers_pattern + del rules_content["user_contents"]["cert_signers_pattern"] + + with pytest.raises(VerifyError) as err: + site.content_manager.verifyFile( + "data/users/1J6UrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json", + StringIO(json.dumps(signed_content)), ignore_same=False + ) + assert "Invalid cert signer: 14wgQ4VDDZNoRMFF4yCDuTrBSHmYhL3bet" in str(err) + + + def testNewFile(self, site): + privatekey = "5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv" # For 1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT + inner_path = "data/users/1NEWrZMkarjVg5ax9W4qThir3BFUikbW6C/content.json" + + site.storage.writeJson(inner_path, {"test": "data"}) + site.content_manager.sign(inner_path, privatekey) + assert "test" in site.storage.loadJson(inner_path) + + site.storage.delete(inner_path) diff --git a/src/Test/TestCryptBitcoin.py b/src/Test/TestCryptBitcoin.py index bebf906ee..a6009679d 100644 --- a/src/Test/TestCryptBitcoin.py +++ b/src/Test/TestCryptBitcoin.py @@ -34,10 +34,12 @@ def testSign(self): assert address_bad != "1MpDMxFeDUkiHohxx9tbGLeEGEuR4ZNsJz" # Text signing - sign = CryptBitcoin.sign("hello", privatekey) + for pad_len in range(0, 300, 10): + pad = pad_len * "!" 
+ sign = CryptBitcoin.sign("hello" + pad, privatekey) - assert CryptBitcoin.verify("hello", address, sign) - assert not CryptBitcoin.verify("not hello", address, sign) + assert CryptBitcoin.verify("hello" + pad, address, sign) + assert not CryptBitcoin.verify("not hello" + pad, address, sign) # Signed by bad privatekey sign_bad = CryptBitcoin.sign("hello", privatekey_bad) diff --git a/src/Test/TestDb.py b/src/Test/TestDb.py index 9558a490a..d821fe5de 100644 --- a/src/Test/TestDb.py +++ b/src/Test/TestDb.py @@ -1,9 +1,5 @@ -import os import cStringIO as StringIO -from Config import config -from Db import Db - class TestDb: def testCheckTables(self, db): @@ -53,13 +49,60 @@ def testQueries(self, db): {"test_id": [1, 2, 3], "title": ["Test #2", "Test #3", "Test #4"]} ).fetchone()["num"] == 2 + # Test multiple select using named params + assert db.execute("SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id", {"test_id": [1, 2, 3]}).fetchone()["num"] == 3 + assert db.execute( + "SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id AND title = :title", + {"test_id": [1, 2, 3], "title": "Test #2"} + ).fetchone()["num"] == 1 + assert db.execute( + "SELECT COUNT(*) AS num FROM test WHERE test_id IN :test_id AND title IN :title", + {"test_id": [1, 2, 3], "title": ["Test #2", "Test #3", "Test #4"]} + ).fetchone()["num"] == 2 + + # Large ammount of IN values + assert db.execute( + "SELECT COUNT(*) AS num FROM test WHERE ?", + {"not__test_id": range(2, 3000)} + ).fetchone()["num"] == 2 + assert db.execute( + "SELECT COUNT(*) AS num FROM test WHERE ?", + {"test_id": range(50, 3000)} + ).fetchone()["num"] == 50 + + assert db.execute( + "SELECT COUNT(*) AS num FROM test WHERE ?", + {"not__title": ["Test #%s" % i for i in range(50, 3000)]} + ).fetchone()["num"] == 50 + # Test named parameter escaping assert db.execute( "SELECT COUNT(*) AS num FROM test WHERE test_id = :test_id AND title LIKE :titlelike", {"test_id": 1, "titlelike": "Test%"} ).fetchone()["num"] == 1 - 
def testLoadJson(self, db): + def testEscaping(self, db): + # Test insert + for i in range(100): + db.execute("INSERT INTO test ?", {"test_id": i, "title": "Test '\" #%s" % i}) + + assert db.execute( + "SELECT COUNT(*) AS num FROM test WHERE ?", + {"title": "Test '\" #1"} + ).fetchone()["num"] == 1 + + assert db.execute( + "SELECT COUNT(*) AS num FROM test WHERE ?", + {"title": ["Test '\" #%s" % i for i in range(0, 50)]} + ).fetchone()["num"] == 50 + + assert db.execute( + "SELECT COUNT(*) AS num FROM test WHERE ?", + {"not__title": ["Test '\" #%s" % i for i in range(50, 3000)]} + ).fetchone()["num"] == 50 + + + def testUpdateJson(self, db): f = StringIO.StringIO() f.write(""" { @@ -69,6 +112,21 @@ def testLoadJson(self, db): } """) f.seek(0) - assert db.loadJson(db.db_dir + "data.json", f) == True + assert db.updateJson(db.db_dir + "data.json", f) is True assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 1 assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 1 + + def testUnsafePattern(self, db): + db.schema["maps"] = {"[A-Za-z.]*": db.schema["maps"]["data.json"]} # Only repetition of . 
supported + f = StringIO.StringIO() + f.write(""" + { + "test": [ + {"test_id": 1, "title": "Test 1 title", "extra col": "Ignore it"} + ] + } + """) + f.seek(0) + assert db.updateJson(db.db_dir + "data.json", f) is False + assert db.execute("SELECT COUNT(*) AS num FROM test_importfilter").fetchone()["num"] == 0 + assert db.execute("SELECT COUNT(*) AS num FROM test").fetchone()["num"] == 0 diff --git a/src/Test/TestFileRequest.py b/src/Test/TestFileRequest.py index e53c83dc6..5210ce820 100644 --- a/src/Test/TestFileRequest.py +++ b/src/Test/TestFileRequest.py @@ -13,17 +13,21 @@ class TestFileRequest: def testGetFile(self, file_server, site): file_server.ip_incoming = {} # Reset flood protection - client = ConnectionServer("127.0.0.1", 1545) + client = ConnectionServer(file_server.ip, 1545) - connection = client.getConnection("127.0.0.1", 1544) + connection = client.getConnection(file_server.ip, 1544) file_server.sites[site.address] = site + # Normal request response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0}) assert "sign" in response["body"] + response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": site.storage.getSize("content.json")}) + assert "sign" in response["body"] + # Invalid file response = connection.request("getFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}) - assert "No such file or directory" in response["error"] + assert "File read error" in response["error"] # Location over size response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 1024 * 1024}) @@ -31,15 +35,26 @@ def testGetFile(self, file_server, site): # Stream from parent dir response = connection.request("getFile", {"site": site.address, "inner_path": "../users.json", "location": 0}) - assert "File not allowed" in response["error"] + assert "File read error" in response["error"] + + # Invalid 
site + response = connection.request("getFile", {"site": "", "inner_path": "users.json", "location": 0}) + assert "Unknown site" in response["error"] + + response = connection.request("getFile", {"site": ".", "inner_path": "users.json", "location": 0}) + assert "Unknown site" in response["error"] + + # Invalid size + response = connection.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0, "file_size": 1234}) + assert "File size does not match" in response["error"] connection.close() client.stop() def testStreamFile(self, file_server, site): file_server.ip_incoming = {} # Reset flood protection - client = ConnectionServer("127.0.0.1", 1545) - connection = client.getConnection("127.0.0.1", 1544) + client = ConnectionServer(file_server.ip, 1545) + connection = client.getConnection(file_server.ip, 1544) file_server.sites[site.address] = site buff = StringIO.StringIO() @@ -50,7 +65,7 @@ def testStreamFile(self, file_server, site): # Invalid file buff = StringIO.StringIO() response = connection.request("streamFile", {"site": site.address, "inner_path": "invalid.file", "location": 0}, buff) - assert "No such file or directory" in response["error"] + assert "File read error" in response["error"] # Location over size buff = StringIO.StringIO() @@ -62,31 +77,42 @@ def testStreamFile(self, file_server, site): # Stream from parent dir buff = StringIO.StringIO() response = connection.request("streamFile", {"site": site.address, "inner_path": "../users.json", "location": 0}, buff) - assert "File not allowed" in response["error"] + assert "File read error" in response["error"] connection.close() client.stop() def testPex(self, file_server, site, site_temp): file_server.sites[site.address] = site - client = FileServer("127.0.0.1", 1545) + client = FileServer(file_server.ip, 1545) client.sites[site_temp.address] = site_temp site_temp.connection_server = client - connection = client.getConnection("127.0.0.1", 1544) + connection = 
client.getConnection(file_server.ip, 1544) # Add new fake peer to site - fake_peer = site.addPeer("1.2.3.4", 11337, return_peer=True) + fake_peer = site.addPeer(file_server.ip_external, 11337, return_peer=True) # Add fake connection to it - fake_peer.connection = Connection(file_server, "1.2.3.4", 11337) + fake_peer.connection = Connection(file_server, file_server.ip_external, 11337) fake_peer.connection.last_recv_time = time.time() assert fake_peer in site.getConnectablePeers() # Add file_server as peer to client - peer_file_server = site_temp.addPeer("127.0.0.1", 1544) + peer_file_server = site_temp.addPeer(file_server.ip, 1544) - assert "1.2.3.4:11337" not in site_temp.peers + assert "%s:11337" % file_server.ip_external not in site_temp.peers assert peer_file_server.pex() - assert "1.2.3.4:11337" in site_temp.peers + assert "%s:11337" % file_server.ip_external in site_temp.peers + + # Should not exchange private peers from local network + fake_peer_private = site.addPeer("192.168.0.1", 11337, return_peer=True) + assert fake_peer_private not in site.getConnectablePeers(allow_private=False) + fake_peer_private.connection = Connection(file_server, "192.168.0.1", 11337) + fake_peer_private.connection.last_recv_time = time.time() + + assert "192.168.0.1:11337" not in site_temp.peers + assert not peer_file_server.pex() + assert "192.168.0.1:11337" not in site_temp.peers + connection.close() client.stop() diff --git a/src/Test/TestHelper.py b/src/Test/TestHelper.py index 28f7f6fb8..82b3258a1 100644 --- a/src/Test/TestHelper.py +++ b/src/Test/TestHelper.py @@ -1,4 +1,5 @@ import socket +import struct import pytest from util import helper @@ -12,8 +13,17 @@ def testShellquote(self): assert helper.shellquote("hel'lo", 'hel"lo') == ('"hel\'lo"', '"hello"') def testPackAddress(self): - assert len(helper.packAddress("1.1.1.1", 1)) == 6 - assert helper.unpackAddress(helper.packAddress("1.1.1.1", 1)) == ("1.1.1.1", 1) + for port in [1, 1000, 65535]: + for ip in ["1.1.1.1", 
"127.0.0.1", "0.0.0.0", "255.255.255.255", "192.168.1.1"]: + assert len(helper.packAddress(ip, port)) == 6 + assert helper.unpackAddress(helper.packAddress(ip, port)) == (ip, port) + + for ip in ["1:2:3:4:5:6:7:8", "::1", "2001:19f0:6c01:e76:5400:1ff:fed6:3eca", "2001:4860:4860::8888"]: + assert len(helper.packAddress(ip, port)) == 18 + assert helper.unpackAddress(helper.packAddress(ip, port)) == (ip, port) + + with pytest.raises(struct.error) as err: + helper.packAddress("1.1.1.1", 100000) with pytest.raises(socket.error): helper.packAddress("999.1.1.1", 1) @@ -27,7 +37,7 @@ def testGetDirname(self): assert helper.getDirname("") == "" assert helper.getDirname("content.json") == "" assert helper.getDirname("data/users/") == "data/users/" - assert helper.getDirname("/data/users/content.json") == "/data/users/" + assert helper.getDirname("/data/users/content.json") == "data/users/" def testGetFilename(self): @@ -36,4 +46,17 @@ def testGetFilename(self): assert helper.getFilename("") == "" assert helper.getFilename("content.json") == "content.json" assert helper.getFilename("data/users/") == "" - assert helper.getFilename("/data/users/content.json") == "content.json" \ No newline at end of file + assert helper.getFilename("/data/users/content.json") == "content.json" + + def testIsIp(self): + assert helper.isIp("1.2.3.4") + assert helper.isIp("255.255.255.255") + assert not helper.isIp("any.host") + assert not helper.isIp("1.2.3.4.com") + assert not helper.isIp("1.2.3.4.any.host") + + def testIsPrivateIp(self): + assert helper.isPrivateIp("192.168.1.1") + assert not helper.isPrivateIp("1.1.1.1") + assert helper.isPrivateIp("fe80::44f0:3d0:4e6:637c") + assert not helper.isPrivateIp("fca5:95d6:bfde:d902:8951:276e:1111:a22c") # cjdns diff --git a/src/Test/TestMsgpack.py b/src/Test/TestMsgpack.py new file mode 100644 index 000000000..3665a0a4f --- /dev/null +++ b/src/Test/TestMsgpack.py @@ -0,0 +1,46 @@ +import cStringIO as StringIO + +import msgpack +import pytest + 
+from Config import config +from util import StreamingMsgpack + + +class TestMsgpack: + test_data = {"cmd": "fileGet", "params": {"site": "1Site"}} + + def testUnpackinkg(self): + assert msgpack.unpackb(msgpack.packb(self.test_data)) == self.test_data + + @pytest.mark.parametrize("unpacker_class", [msgpack.Unpacker, msgpack.fallback.Unpacker]) + def testUnpacker(self, unpacker_class): + unpacker = unpacker_class() + + data = msgpack.packb(self.test_data) + data += msgpack.packb(self.test_data) + + messages = [] + for char in data: + unpacker.feed(char) + for message in unpacker: + messages.append(message) + + assert len(messages) == 2 + assert messages[0] == self.test_data + assert messages[0] == messages[1] + + def testStreaming(self): + f = StreamingMsgpack.FilePart("%s/users.json" % config.data_dir) + f.read_bytes = 10 + + data = {"cmd": "response", "params": f} + + out_buff = StringIO.StringIO() + StreamingMsgpack.stream(data, out_buff.write) + out_buff.seek(0) + + data_packb = {"cmd": "response", "params": open("%s/users.json" % config.data_dir).read(10)} + + out_buff.seek(0) + assert msgpack.unpackb(out_buff.read()) == data_packb diff --git a/src/Test/TestNoparallel.py b/src/Test/TestNoparallel.py index abc4c7679..5c0242f5d 100644 --- a/src/Test/TestNoparallel.py +++ b/src/Test/TestNoparallel.py @@ -1,10 +1,8 @@ import time +import util import gevent -from gevent import monkey -monkey.patch_all() -import util class ExampleClass(object): def __init__(self): @@ -12,14 +10,21 @@ def __init__(self): @util.Noparallel() def countBlocking(self, num=5): - for i in range(1, num+1): + for i in range(1, num + 1): time.sleep(0.01) self.counted += 1 return "counted:%s" % i + @util.Noparallel(queue=True, ignore_class=True) + def countQueue(self, num=5): + for i in range(1, num + 1): + time.sleep(0.1) + self.counted += 1 + return "counted:%s" % i + @util.Noparallel(blocking=False) def countNoblocking(self, num=5): - for i in range(1, num+1): + for i in range(1, num + 1): 
time.sleep(0.01) self.counted += 1 return "counted:%s" % i @@ -39,7 +44,7 @@ def testBlocking(self): ] assert obj2.countBlocking() == "counted:5" # The call is ignored as obj2.countBlocking already counting, but block until its finishes gevent.joinall(threads) - assert [thread.value for thread in threads] == ["counted:5","counted:5","counted:5","counted:5"] # Check the return value for every call + assert [thread.value for thread in threads] == ["counted:5", "counted:5", "counted:5", "counted:5"] # Check the return value for every call obj2.countBlocking() # Allow to call again as obj2.countBlocking finished assert obj1.counted == 5 @@ -60,3 +65,56 @@ def testNoblocking(self): obj1.countNoblocking().join() # Allow again and wait until finishes assert obj1.counted == 10 + + def testQueue(self): + obj1 = ExampleClass() + + gevent.spawn(obj1.countQueue, num=10) + gevent.spawn(obj1.countQueue, num=10) + gevent.spawn(obj1.countQueue, num=10) + + time.sleep(3.0) + assert obj1.counted == 20 # No multi-queue supported + + obj2 = ExampleClass() + gevent.spawn(obj2.countQueue, num=10) + gevent.spawn(obj2.countQueue, num=10) + + time.sleep(1.5) # Call 1 finished, call 2 still working + assert 10 < obj2.counted < 20 + + gevent.spawn(obj2.countQueue, num=10) + time.sleep(2.0) + + assert obj2.counted == 30 + + def testQueueOverload(self): + obj1 = ExampleClass() + + threads = [] + for i in range(10000): + thread = gevent.spawn(obj1.countQueue, num=5) + threads.append(thread) + + gevent.joinall(threads) + assert obj1.counted == 5 * 2 # Only called twice + + def testIgnoreClass(self): + obj1 = ExampleClass() + obj2 = ExampleClass() + + threads = [ + gevent.spawn(obj1.countQueue), + gevent.spawn(obj1.countQueue), + gevent.spawn(obj1.countQueue), + gevent.spawn(obj2.countQueue), + gevent.spawn(obj2.countQueue) + ] + s = time.time() + gevent.joinall(threads) + + # Queue limited to 2 calls (very call takes counts to 5 and takes 0.05 sec) + assert obj1.counted + obj2.counted == 10 + + 
taken = time.time() - s + assert 1.1 > taken >= 1.0 # 2 * 0.5s count = ~1s diff --git a/src/Test/TestPeer.py b/src/Test/TestPeer.py index f0d810339..32ca13230 100644 --- a/src/Test/TestPeer.py +++ b/src/Test/TestPeer.py @@ -13,15 +13,14 @@ @pytest.mark.usefixtures("resetTempSettings") class TestPeer: def testPing(self, file_server, site, site_temp): - file_server.ip_incoming = {} # Reset flood protection file_server.sites[site.address] = site - client = FileServer("127.0.0.1", 1545) + client = FileServer(file_server.ip, 1545) client.sites[site_temp.address] = site_temp site_temp.connection_server = client - connection = client.getConnection("127.0.0.1", 1544) + connection = client.getConnection(file_server.ip, 1544) # Add file_server as peer to client - peer_file_server = site_temp.addPeer("127.0.0.1", 1544) + peer_file_server = site_temp.addPeer(file_server.ip, 1544) assert peer_file_server.ping() is not None @@ -33,18 +32,17 @@ def testPing(self, file_server, site, site_temp): client.stop() def testDownloadFile(self, file_server, site, site_temp): - file_server.ip_incoming = {} # Reset flood protection file_server.sites[site.address] = site - client = FileServer("127.0.0.1", 1545) + client = FileServer(file_server.ip, 1545) client.sites[site_temp.address] = site_temp site_temp.connection_server = client - connection = client.getConnection("127.0.0.1", 1544) + connection = client.getConnection(file_server.ip, 1544) # Add file_server as peer to client - peer_file_server = site_temp.addPeer("127.0.0.1", 1544) + peer_file_server = site_temp.addPeer(file_server.ip, 1544) # Testing streamFile - buff = peer_file_server.streamFile(site_temp.address, "content.json") + buff = peer_file_server.getFile(site_temp.address, "content.json", streaming=True) assert "sign" in buff.getvalue() # Testing getFile @@ -79,15 +77,16 @@ def testHashfield(self, site): def testHashfieldExchange(self, file_server, site, site_temp): server1 = file_server - server1.ip_incoming = {} # Reset 
flood protection server1.sites[site.address] = site - server2 = FileServer("127.0.0.1", 1545) + site.connection_server = server1 + + server2 = FileServer(file_server.ip, 1545) server2.sites[site_temp.address] = site_temp site_temp.connection_server = server2 site.storage.verifyFiles(quick_check=True) # Find what optional files we have # Add file_server as peer to client - server2_peer1 = site_temp.addPeer("127.0.0.1", 1544) + server2_peer1 = site_temp.addPeer(file_server.ip, 1544) # Check if hashfield has any files assert len(site.content_manager.hashfield) > 0 @@ -99,7 +98,7 @@ def testHashfieldExchange(self, file_server, site, site_temp): # Test force push new hashfield site_temp.content_manager.hashfield.appendHash("AABB") - server1_peer2 = site.addPeer("127.0.0.1", 1545, return_peer=True) + server1_peer2 = site.addPeer(file_server.ip, 1545, return_peer=True) with Spy.Spy(FileRequest, "route") as requests: assert len(server1_peer2.hashfield) == 0 server2_peer1.sendMyHashfield() @@ -128,19 +127,18 @@ def testHashfieldExchange(self, file_server, site, site_temp): server2.stop() def testFindHash(self, file_server, site, site_temp): - file_server.ip_incoming = {} # Reset flood protection file_server.sites[site.address] = site - client = FileServer("127.0.0.1", 1545) + client = FileServer(file_server.ip, 1545) client.sites[site_temp.address] = site_temp site_temp.connection_server = client # Add file_server as peer to client - peer_file_server = site_temp.addPeer("127.0.0.1", 1544) + peer_file_server = site_temp.addPeer(file_server.ip, 1544) assert peer_file_server.findHashIds([1234]) == {} # Add fake peer with requred hash - fake_peer_1 = site.addPeer("1.2.3.4", 1544) + fake_peer_1 = site.addPeer(file_server.ip_external, 1544) fake_peer_1.hashfield.append(1234) fake_peer_2 = site.addPeer("1.2.3.5", 1545) fake_peer_2.hashfield.append(1234) @@ -149,14 +147,13 @@ def testFindHash(self, file_server, site, site_temp): fake_peer_3.hashfield.append(1235) 
fake_peer_3.hashfield.append(1236) - assert peer_file_server.findHashIds([1234, 1235]) == { - 1234: [('1.2.3.4', 1544), ('1.2.3.5', 1545)], - 1235: [('1.2.3.5', 1545), ('1.2.3.6', 1546)] - } + res = peer_file_server.findHashIds([1234, 1235]) + assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544), ("1.2.3.5", 1545)]) + assert sorted(res[1235]) == sorted([("1.2.3.5", 1545), ("1.2.3.6", 1546)]) # Test my address adding site.content_manager.hashfield.append(1234) res = peer_file_server.findHashIds([1234, 1235]) - assert res[1234] == [('1.2.3.4', 1544), ('1.2.3.5', 1545), ("127.0.0.1", 1544)] - assert res[1235] == [('1.2.3.5', 1545), ('1.2.3.6', 1546)] \ No newline at end of file + assert sorted(res[1234]) == sorted([(file_server.ip_external, 1544), ("1.2.3.5", 1545), (file_server.ip, 1544)]) + assert sorted(res[1235]) == sorted([("1.2.3.5", 1545), ("1.2.3.6", 1546)]) diff --git a/src/Test/TestRateLimit.py b/src/Test/TestRateLimit.py index a823d88b6..fafa5f1ab 100644 --- a/src/Test/TestRateLimit.py +++ b/src/Test/TestRateLimit.py @@ -1,8 +1,6 @@ import time import gevent -from gevent import monkey -monkey.patch_all() from util import RateLimit @@ -39,6 +37,7 @@ def testCall(self): assert RateLimit.call("counting", allowed_again=0.1, func=obj1.count) == "counted" assert around(time.time() - s, 0.1) # Delays second call within interval assert obj1.counted == 2 + time.sleep(0.1) # Wait the cooldown time # Call 3 times async s = time.time() @@ -52,6 +51,11 @@ def testCall(self): assert [thread.value for thread in threads] == ["counted", "counted", "counted"] assert around(time.time() - s, 0.2) + # Wait 0.1s cooldown + assert not RateLimit.isAllowed("counting", 0.1) + time.sleep(0.11) + assert RateLimit.isAllowed("counting", 0.1) + # No queue = instant again s = time.time() assert RateLimit.isAllowed("counting", 0.1) @@ -85,12 +89,12 @@ def testCallAsync(self): assert obj1.counted == 2 assert obj1.last_called == "call #4" - # Allowed again instantly - assert 
RateLimit.isAllowed("counting async", 0.1) + # Just called, not allowed again + assert not RateLimit.isAllowed("counting async", 0.1) s = time.time() - RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #5").join() + t4 = RateLimit.callAsync("counting async", allowed_again=0.1, func=obj1.count, back="call #5").join() assert obj1.counted == 3 - assert around(time.time() - s, 0.0) + assert around(time.time() - s, 0.1) assert not RateLimit.isAllowed("counting async", 0.1) time.sleep(0.11) assert RateLimit.isAllowed("counting async", 0.1) diff --git a/src/Test/TestSafeRe.py b/src/Test/TestSafeRe.py new file mode 100644 index 000000000..b80371232 --- /dev/null +++ b/src/Test/TestSafeRe.py @@ -0,0 +1,24 @@ +from util import SafeRe + +import pytest + + +class TestSafeRe: + def testSafeMatch(self): + assert SafeRe.match( + "((js|css)/(?!all.(js|css))|data/users/.*db|data/users/.*/.*|data/archived|.*.py)", + "js/ZeroTalk.coffee" + ) + assert SafeRe.match(".+/data.json", "data/users/1J3rJ8ecnwH2EPYa6MrgZttBNc61ACFiCj/data.json") + + @pytest.mark.parametrize("pattern", ["([a-zA-Z]+)*", "(a|aa)+*", "(a|a?)+", "(.*a){10}", "((?!json).)*$", r"(\w+\d+)+C"]) + def testUnsafeMatch(self, pattern): + with pytest.raises(SafeRe.UnsafePatternError) as err: + SafeRe.match(pattern, "aaaaaaaaaaaaaaaaaaaaaaaa!") + assert "Potentially unsafe" in str(err) + + @pytest.mark.parametrize("pattern", ["^(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)(.*a)$"]) + def testUnsafeRepetition(self, pattern): + with pytest.raises(SafeRe.UnsafePatternError) as err: + SafeRe.match(pattern, "aaaaaaaaaaaaaaaaaaaaaaaa!") + assert "More than" in str(err) diff --git a/src/Test/TestSite.py b/src/Test/TestSite.py index 71d1706fd..b9a40064a 100644 --- a/src/Test/TestSite.py +++ b/src/Test/TestSite.py @@ -26,14 +26,18 @@ def testClone(self, site): assert new_site.storage.isFile("index.html") assert 
new_site.storage.isFile("data/users/content.json") assert new_site.storage.isFile("data/zeroblog.db") - assert new_site.storage.verifyFiles() == [] # No bad files allowed + assert new_site.storage.verifyFiles()["bad_files"] == [] # No bad files allowed assert new_site.storage.query("SELECT * FROM keyvalue WHERE key = 'title'").fetchone()["value"] == "MyZeroBlog" + # Optional files should be removed + + assert len(new_site.storage.loadJson("content.json").get("files_optional", {})) == 0 + # Test re-cloning (updating) # Changes in non-data files should be overwritten new_site.storage.write("index.html", "this will be overwritten") - assert new_site.storage.read("index.html"), "this will be overwritten" + assert new_site.storage.read("index.html") == "this will be overwritten" # Changes in data file should be kept after re-cloning changed_contentjson = new_site.storage.loadJson("content.json") diff --git a/src/Test/TestSiteDownload.py b/src/Test/TestSiteDownload.py index 9b20b6f21..842cfc827 100644 --- a/src/Test/TestSiteDownload.py +++ b/src/Test/TestSiteDownload.py @@ -16,8 +16,6 @@ @pytest.mark.usefixtures("resetSettings") class TestSiteDownload: def testDownload(self, file_server, site, site_temp): - file_server.ip_incoming = {} # Reset flood protection - assert site.storage.directory == config.data_dir + "/" + site.address assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address @@ -26,29 +24,29 @@ def testDownload(self, file_server, site, site_temp): file_server.sites[site.address] = site # Init client server - client = ConnectionServer("127.0.0.1", 1545) + client = ConnectionServer(file_server.ip, 1545) site_temp.connection_server = client site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net - site_temp.addPeer("127.0.0.1", 1544) + site_temp.addPeer(file_server.ip, 1544) with Spy.Spy(FileRequest, "route") as requests: def boostRequest(inner_path): # I really want these file if inner_path == 
"index.html": - site_temp.needFile("data/img/multiuser.png", priority=9, blocking=False) - site_temp.needFile("data/img/direct_domains.png", priority=10, blocking=False) + site_temp.needFile("data/img/multiuser.png", priority=15, blocking=False) + site_temp.needFile("data/img/direct_domains.png", priority=15, blocking=False) site_temp.onFileDone.append(boostRequest) site_temp.download(blind_includes=True).join(timeout=5) - file_requests = [request[2]["inner_path"] for request in requests if request[0] in ("getFile", "streamFile")] + file_requests = [request[3]["inner_path"] for request in requests if request[1] in ("getFile", "streamFile")] # Test priority assert file_requests[0:2] == ["content.json", "index.html"] # Must-have files - assert file_requests[2:4] == ["data/img/direct_domains.png", "data/img/multiuser.png"] # Directly requested files + assert file_requests[2:4] == ["data/img/multiuser.png", "data/img/direct_domains.png"] # Directly requested files assert file_requests[4:6] == ["css/all.css", "js/all.js"] # Important assets assert file_requests[6] == "dbschema.json" # Database map assert "-default" in file_requests[-1] # Put default files for cloning to the end # Check files - bad_files = site_temp.storage.verifyFiles(quick_check=True) + bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"] # -1 because data/users/1J6... 
user has invalid cert assert len(site_temp.content_manager.contents) == len(site.content_manager.contents) - 1 @@ -58,21 +56,19 @@ def boostRequest(inner_path): [connection.close() for connection in file_server.connections] def testArchivedDownload(self, file_server, site, site_temp): - file_server.ip_incoming = {} # Reset flood protection - # Init source server site.connection_server = file_server file_server.sites[site.address] = site # Init client server - client = FileServer("127.0.0.1", 1545) + client = FileServer(file_server.ip, 1545) client.sites[site_temp.address] = site_temp site_temp.connection_server = client # Download normally - site_temp.addPeer("127.0.0.1", 1544) + site_temp.addPeer(file_server.ip, 1544) site_temp.download(blind_includes=True).join(timeout=5) - bad_files = site_temp.storage.verifyFiles(quick_check=True) + bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"] assert not bad_files assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents @@ -94,6 +90,7 @@ def testArchivedDownload(self, file_server, site, site_temp): # Push archived update assert not "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"] site.publish() + time.sleep(0.1) site_temp.download(blind_includes=True).join(timeout=5) # Wait for download # The archived content should disappear from remote client @@ -106,20 +103,68 @@ def testArchivedDownload(self, file_server, site, site_temp): assert site_temp.storage.deleteFiles() [connection.close() for connection in file_server.connections] + def testArchivedBeforeDownload(self, file_server, site, site_temp): + # Init source server + site.connection_server = file_server + file_server.sites[site.address] = site + + # Init client server + client = FileServer(file_server.ip, 1545) + client.sites[site_temp.address] = site_temp + site_temp.connection_server = client + + # Download normally + site_temp.addPeer(file_server.ip, 
1544) + site_temp.download(blind_includes=True).join(timeout=5) + bad_files = site_temp.storage.verifyFiles(quick_check=True)["bad_files"] + + assert not bad_files + assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents + assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json") + assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2 + + # Add archived data + assert not "archived_before" in site.content_manager.contents["data/users/content.json"]["user_contents"] + assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time()-1) + + content_modification_time = site.content_manager.contents["data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json"]["modified"] + site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"] = content_modification_time + site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv") + + date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived_before"] + assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived-1) + assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived) + assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived+1) # Allow user to update archived data later + + # Push archived update + assert not "archived_before" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"] + site.publish() + time.sleep(0.1) + site_temp.download(blind_includes=True).join(timeout=5) # Wait for download + + # The archived content should disappear from remote client + assert "archived_before" in 
site_temp.content_manager.contents["data/users/content.json"]["user_contents"] + assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents + assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q") + assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1 + assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0 + + assert site_temp.storage.deleteFiles() + [connection.close() for connection in file_server.connections] + + # Test when connected peer has the optional file def testOptionalDownload(self, file_server, site, site_temp): - file_server.ip_incoming = {} # Reset flood protection - # Init source server site.connection_server = file_server file_server.sites[site.address] = site # Init client server - client = ConnectionServer("127.0.0.1", 1545) + client = ConnectionServer(file_server.ip, 1545) site_temp.connection_server = client site_temp.announce = mock.MagicMock(return_value=True) # Don't try to find peers from the net - site_temp.addPeer("127.0.0.1", 1544) + site_temp.addPeer(file_server.ip, 1544) # Download site site_temp.download(blind_includes=True).join(timeout=5) @@ -152,21 +197,24 @@ def testOptionalDownload(self, file_server, site, site_temp): # Test when connected peer does not has the file, so ask him if he know someone who has it def testFindOptional(self, file_server, site, site_temp): - file_server.ip_incoming = {} # Reset flood protection - # Init source server site.connection_server = file_server file_server.sites[site.address] = site # Init full source server (has optional files) site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT") - file_server_full = FileServer("127.0.0.1", 1546) + file_server_full = FileServer(file_server.ip, 1546) site_full.connection_server = file_server_full - gevent.spawn(lambda: ConnectionServer.start(file_server_full)) + + def listen(): + 
ConnectionServer.start(file_server_full) + ConnectionServer.listen(file_server_full) + + gevent.spawn(listen) time.sleep(0.001) # Port opening file_server_full.sites[site_full.address] = site_full # Add site site_full.storage.verifyFiles(quick_check=True) # Check optional files - site_full_peer = site.addPeer("127.0.0.1", 1546) # Add it to source server + site_full_peer = site.addPeer(file_server.ip, 1546) # Add it to source server hashfield = site_full_peer.updateHashfield() # Update hashfield assert len(site_full.content_manager.hashfield) == 8 assert hashfield @@ -179,8 +227,8 @@ def testFindOptional(self, file_server, site, site_temp): site.content_manager.hashfield.remove(hash) # Init client server - site_temp.connection_server = ConnectionServer("127.0.0.1", 1545) - site_temp.addPeer("127.0.0.1", 1544) # Add source server + site_temp.connection_server = ConnectionServer(file_server.ip, 1545) + site_temp.addPeer(file_server.ip, 1544) # Add source server # Download normal files site_temp.log.info("Start Downloading site") @@ -206,7 +254,7 @@ def testFindOptional(self, file_server, site, site_temp): threads.append(site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False)) gevent.joinall(threads) - assert len([request for request in requests if request[0] == "findHashIds"]) == 1 # findHashids should call only once + assert len([request for request in requests if request[1] == "findHashIds"]) == 1 # findHashids should call only once assert site_temp.storage.isFile("data/optional.txt") assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif") @@ -214,10 +262,9 @@ def testFindOptional(self, file_server, site, site_temp): assert site_temp.storage.deleteFiles() file_server_full.stop() [connection.close() for connection in file_server.connections] + site_full.content_manager.contents.db.close() def testUpdate(self, file_server, site, site_temp): - 
file_server.ip_incoming = {} # Reset flood protection - assert site.storage.directory == config.data_dir + "/" + site.address assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address @@ -226,7 +273,7 @@ def testUpdate(self, file_server, site, site_temp): file_server.sites[site.address] = site # Init client server - client = FileServer("127.0.0.1", 1545) + client = FileServer(file_server.ip, 1545) client.sites[site_temp.address] = site_temp site_temp.connection_server = client @@ -235,7 +282,7 @@ def testUpdate(self, file_server, site, site_temp): site_temp.announce = mock.MagicMock(return_value=True) # Connect peers - site_temp.addPeer("127.0.0.1", 1544) + site_temp.addPeer(file_server.ip, 1544) # Download site from site to site_temp site_temp.download(blind_includes=True).join(timeout=5) @@ -257,13 +304,13 @@ def testUpdate(self, file_server, site, site_temp): site.publish() time.sleep(0.1) site_temp.download(blind_includes=True).join(timeout=5) - assert len([request for request in requests if request[0] in ("getFile", "streamFile")]) == 1 + assert len([request for request in requests if request[1] in ("getFile", "streamFile")]) == 1 assert site_temp.storage.open("data/data.json").read() == data_new # Close connection to avoid update spam limit site.peers.values()[0].remove() - site.addPeer("127.0.0.1", 1545) + site.addPeer(file_server.ip, 1545) site_temp.peers.values()[0].ping() # Connect back time.sleep(0.1) diff --git a/src/Test/TestSiteStorage.py b/src/Test/TestSiteStorage.py index b4d45468c..e9977e8e4 100644 --- a/src/Test/TestSiteStorage.py +++ b/src/Test/TestSiteStorage.py @@ -3,11 +3,20 @@ @pytest.mark.usefixtures("resetSettings") class TestSiteStorage: + def testWalk(self, site): + # Rootdir + walk_root = list(site.storage.walk("")) + assert "content.json" in walk_root + assert "css/all.css" in walk_root + + # Subdir + assert list(site.storage.walk("data-default")) == ["data.json", "users/content-default.json"] + def testList(self, 
site): # Rootdir list_root = list(site.storage.list("")) assert "content.json" in list_root - assert "css/all.css" in list_root + assert "css/all.css" not in list_root # Subdir - assert list(site.storage.list("data-default")) == ["data.json", "users/content-default.json"] + assert set(site.storage.list("data-default")) == set(["data.json", "users"]) diff --git a/src/Test/TestTor.py b/src/Test/TestTor.py index ec154f995..9479aa2f6 100644 --- a/src/Test/TestTor.py +++ b/src/Test/TestTor.py @@ -1,8 +1,11 @@ -import pytest import time +import pytest +import mock + from File import FileServer from Crypt import CryptRsa +from Config import config @pytest.mark.usefixtures("resetSettings") @pytest.mark.usefixtures("resetTempSettings") @@ -46,7 +49,7 @@ def testSignOnion(self, tor_manager): # Delete tor_manager.delOnion(address) - @pytest.mark.skipif(not pytest.config.getvalue("slow"), reason="--slow not requested (takes around ~ 1min)") + @pytest.mark.slow def testConnection(self, tor_manager, file_server, site, site_temp): file_server.tor_manager.start_onions = True address = file_server.tor_manager.getOnion(site.address) @@ -55,25 +58,25 @@ def testConnection(self, tor_manager, file_server, site, site_temp): for retry in range(5): # Wait for hidden service creation time.sleep(10) try: - connection = file_server.getConnection(address+".onion", 1544) + connection = file_server.getConnection(address + ".onion", 1544) if connection: break - except Exception, err: + except Exception as err: continue assert connection.handshake assert not connection.handshake["peer_id"] # No peer_id for Tor connections # Return the same connection without site specified - assert file_server.getConnection(address+".onion", 1544) == connection + assert file_server.getConnection(address + ".onion", 1544) == connection # No reuse for different site - assert file_server.getConnection(address+".onion", 1544, site=site) != connection - assert file_server.getConnection(address+".onion", 1544, 
site=site) == file_server.getConnection(address+".onion", 1544, site=site) + assert file_server.getConnection(address + ".onion", 1544, site=site) != connection + assert file_server.getConnection(address + ".onion", 1544, site=site) == file_server.getConnection(address + ".onion", 1544, site=site) site_temp.address = "1OTHERSITE" - assert file_server.getConnection(address+".onion", 1544, site=site) != file_server.getConnection(address+".onion", 1544, site=site_temp) + assert file_server.getConnection(address + ".onion", 1544, site=site) != file_server.getConnection(address + ".onion", 1544, site=site_temp) # Only allow to query from the locked site file_server.sites[site.address] = site - connection_locked = file_server.getConnection(address+".onion", 1544, site=site) + connection_locked = file_server.getConnection(address + ".onion", 1544, site=site) assert "body" in connection_locked.request("getFile", {"site": site.address, "inner_path": "content.json", "location": 0}) assert connection_locked.request("getFile", {"site": "1OTHERSITE", "inner_path": "content.json", "location": 0})["error"] == "Invalid site" @@ -82,24 +85,30 @@ def testPex(self, file_server, site, site_temp): site.connection_server = file_server file_server.sites[site.address] = site # Create a new file server to emulate new peer connecting to our peer - file_server_temp = FileServer("127.0.0.1", 1545) + file_server_temp = FileServer(file_server.ip, 1545) site_temp.connection_server = file_server_temp file_server_temp.sites[site_temp.address] = site_temp + # We will request peers from this - peer_source = site_temp.addPeer("127.0.0.1", 1544) + peer_source = site_temp.addPeer(file_server.ip, 1544) # Get ip4 peers from source site - assert peer_source.pex(need_num=10) == 1 # Need >5 to return also return non-connected peers - assert len(site_temp.peers) == 2 # Me, and the other peer site.addPeer("1.2.3.4", 1555) # Add peer to source site assert peer_source.pex(need_num=10) == 1 - assert 
len(site_temp.peers) == 3 + assert len(site_temp.peers) == 2 assert "1.2.3.4:1555" in site_temp.peers # Get onion peers from source site site.addPeer("bka4ht2bzxchy44r.onion", 1555) assert "bka4ht2bzxchy44r.onion:1555" not in site_temp.peers - assert peer_source.pex(need_num=10) == 1 # Need >5 to return also return non-connected peers + + # Don't add onion peers if not supported + assert "onion" not in file_server_temp.supported_ip_types + assert peer_source.pex(need_num=10) == 0 + + file_server_temp.supported_ip_types.append("onion") + assert peer_source.pex(need_num=10) == 1 + assert "bka4ht2bzxchy44r.onion:1555" in site_temp.peers def testFindHash(self, tor_manager, file_server, site, site_temp): @@ -107,12 +116,12 @@ def testFindHash(self, tor_manager, file_server, site, site_temp): file_server.sites[site.address] = site file_server.tor_manager = tor_manager - client = FileServer("127.0.0.1", 1545) + client = FileServer(file_server.ip, 1545) client.sites[site_temp.address] = site_temp site_temp.connection_server = client # Add file_server as peer to client - peer_file_server = site_temp.addPeer("127.0.0.1", 1544) + peer_file_server = site_temp.addPeer(file_server.ip, 1544) assert peer_file_server.findHashIds([1234]) == {} @@ -133,12 +142,12 @@ def testFindHash(self, tor_manager, file_server, site, site_temp): # Test my address adding site.content_manager.hashfield.append(1234) - my_onion_address = tor_manager.getOnion(site_temp.address)+".onion" res = peer_file_server.findHashIds([1234, 1235]) - assert res[1234] == [('1.2.3.5', 1545), ("bka4ht2bzxchy44r.onion", 1544), (my_onion_address, 1544)] + assert res[1234] == [('1.2.3.5', 1545), ("bka4ht2bzxchy44r.onion", 1544), (file_server.ip, 1544)] assert res[1235] == [('1.2.3.6', 1546), ('1.2.3.5', 1545)] def testSiteOnion(self, tor_manager): - assert tor_manager.getOnion("address1") != tor_manager.getOnion("address2") - assert tor_manager.getOnion("address1") == tor_manager.getOnion("address1") + with 
mock.patch.object(config, "tor", "always"): + assert tor_manager.getOnion("address1") != tor_manager.getOnion("address2") + assert tor_manager.getOnion("address1") == tor_manager.getOnion("address1") diff --git a/src/Test/TestTranslate.py b/src/Test/TestTranslate.py new file mode 100644 index 000000000..530d1bcf6 --- /dev/null +++ b/src/Test/TestTranslate.py @@ -0,0 +1,55 @@ +import os + +from Translate import Translate + +class TestTranslate: + def testTranslateStrict(self): + translate = Translate() + data = """ + translated = _("original") + not_translated = "original" + """ + data_translated = translate.translateData(data, {"_(original)": "translated"}) + assert 'translated = _("translated")' in data_translated + assert 'not_translated = "original"' in data_translated + + + def testTranslateStrictNamed(self): + translate = Translate() + data = """ + translated = _("original", "original named") + translated_other = _("original", "original other named") + not_translated = "original" + """ + data_translated = translate.translateData(data, {"_(original, original named)": "translated"}) + assert 'translated = _("translated")' in data_translated + assert 'not_translated = "original"' in data_translated + + + def testTranslateEscape(self): + _ = Translate() + _["Hello"] = "Szia" + + # Simple escaping + data = "{_[Hello]} {username}!" + username = "Hacker" + data_translated = _(data) + assert 'Szia' in data_translated + assert '<' not in data_translated + assert data_translated == "Szia Hacker<script>alert('boom')</script>!" + + # Escaping dicts + user = {"username": "Hacker"} + data = "{_[Hello]} {user[username]}!" + data_translated = _(data) + assert 'Szia' in data_translated + assert '<' not in data_translated + assert data_translated == "Szia Hacker<script>alert('boom')</script>!" + + # Escaping lists + users = [{"username": "Hacker"}] + data = "{_[Hello]} {users[0][username]}!" 
+ data_translated = _(data) + assert 'Szia' in data_translated + assert '<' not in data_translated + assert data_translated == "Szia Hacker<script>alert('boom')</script>!" diff --git a/src/Test/TestUpnpPunch.py b/src/Test/TestUpnpPunch.py index f77d7f8db..18338bb98 100644 --- a/src/Test/TestUpnpPunch.py +++ b/src/Test/TestUpnpPunch.py @@ -128,7 +128,7 @@ def test_parse_for_errors_bad_rsp(self, httplib_response): rsp = httplib_response(status=500) with pytest.raises(upnp.IGDError) as exc: upnp._parse_for_errors(rsp) - assert 'Unable to parse' in exc.value.message + assert 'Unable to parse' in str(exc) def test_parse_for_errors_error(self, httplib_response): soap_error = ('' @@ -138,7 +138,7 @@ def test_parse_for_errors_error(self, httplib_response): rsp = httplib_response(status=500, body=soap_error) with pytest.raises(upnp.IGDError) as exc: upnp._parse_for_errors(rsp) - assert 'SOAP request error' in exc.value.message + assert 'SOAP request error' in str(exc) def test_parse_for_errors_good_rsp(self, httplib_response): rsp = httplib_response(status=200) diff --git a/src/Test/TestWeb.py b/src/Test/TestWeb.py index 72a34a5a1..10e1829d6 100644 --- a/src/Test/TestWeb.py +++ b/src/Test/TestWeb.py @@ -4,7 +4,7 @@ try: from selenium.webdriver.support.ui import WebDriverWait - from selenium.webdriver.support.expected_conditions import staleness_of + from selenium.webdriver.support.expected_conditions import staleness_of, title_is from selenium.common.exceptions import NoSuchElementException except: pass @@ -18,41 +18,61 @@ def __enter__(self): self.old_page = self.browser.find_element_by_tag_name('html') def __exit__(self, *args): - WebDriverWait(self.browser, 20).until(staleness_of(self.old_page)) + WebDriverWait(self.browser, 10).until(staleness_of(self.old_page)) +def getContextUrl(browser): + return browser.execute_script("return window.location.toString()") + + +def getUrl(url): + content = urllib.urlopen(url).read() + assert "server error" not in content.lower(), "Got 
a server error! " + repr(url) + return content + @pytest.mark.usefixtures("resetSettings") @pytest.mark.webtest class TestWeb: def testFileSecurity(self, site_url): - assert "Forbidden" in urllib.urlopen("%s/media/./sites.json" % site_url).read() - assert "Forbidden" in urllib.urlopen("%s/media/../config.py" % site_url).read() - assert "Forbidden" in urllib.urlopen("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url).read() - assert "Forbidden" in urllib.urlopen("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url).read() - assert "Forbidden" in urllib.urlopen("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url).read() - assert "Forbidden" in urllib.urlopen("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url).read() - assert "Forbidden" in urllib.urlopen("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url).read() - assert "Forbidden" in urllib.urlopen("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url).read() - - def testHomepage(self, browser, site_url): - browser.get("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr" % site_url) - assert browser.title == "ZeroHello - ZeroNet" + assert "Not Found" in getUrl("%s/media/sites.json" % site_url) + assert "Forbidden" in getUrl("%s/media/./sites.json" % site_url) + assert "Forbidden" in getUrl("%s/media/../config.py" % site_url) + assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url) + assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url) + assert "Forbidden" in getUrl("%s/media/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url) + + assert "Not Found" in getUrl("%s/raw/sites.json" % site_url) + assert "Forbidden" in getUrl("%s/raw/./sites.json" % site_url) + assert "Forbidden" in getUrl("%s/raw/../config.py" % site_url) + assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url) + 
assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url) + assert "Forbidden" in getUrl("%s/raw/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url) + + assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../sites.json" % site_url) + assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/..//sites.json" % site_url) + assert "Forbidden" in getUrl("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/../../zeronet.py" % site_url) + + assert "Forbidden" in getUrl("%s/content.db" % site_url) + assert "Forbidden" in getUrl("%s/./users.json" % site_url) + assert "Forbidden" in getUrl("%s/./key-rsa.pem" % site_url) + assert "Forbidden" in getUrl("%s/././././././././././//////sites.json" % site_url) def testLinkSecurity(self, browser, site_url): browser.get("%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url) - assert browser.title == "ZeroHello - ZeroNet" - assert browser.current_url == "%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url + WebDriverWait(browser, 10).until(title_is("ZeroHello - ZeroNet")) + assert getContextUrl(browser) == "%s/1EU1tbG9oC1A8jz2ouVwGZyQ5asrNsE4Vr/test/security.html" % site_url # Switch to inner frame browser.switch_to.frame(browser.find_element_by_id("inner-iframe")) - assert "wrapper_nonce" in browser.current_url + assert "wrapper_nonce" in getContextUrl(browser) + assert browser.find_element_by_id("script_output").text == "Result: Works" browser.switch_to.default_content() # Clicking on links without target browser.switch_to.frame(browser.find_element_by_id("inner-iframe")) with WaitForPageLoad(browser): browser.find_element_by_id("link_to_current").click() - assert "wrapper_nonce" not in browser.current_url # The browser object back to default content + assert "wrapper_nonce" not in getContextUrl(browser) # The browser object back to default content assert "Forbidden" not in browser.page_source # Check if we have frame inside frame 
browser.switch_to.frame(browser.find_element_by_id("inner-iframe")) @@ -64,15 +84,22 @@ def testLinkSecurity(self, browser, site_url): browser.switch_to.frame(browser.find_element_by_id("inner-iframe")) with WaitForPageLoad(browser): browser.find_element_by_id("link_to_top").click() - assert "wrapper_nonce" not in browser.current_url # The browser object back to default content + assert "wrapper_nonce" not in getContextUrl(browser) # The browser object back to default content assert "Forbidden" not in browser.page_source browser.switch_to.default_content() # Try to escape from inner_frame browser.switch_to.frame(browser.find_element_by_id("inner-iframe")) - assert "wrapper_nonce" in browser.current_url # Make sure we are inside of the inner-iframe + assert "wrapper_nonce" in getContextUrl(browser) # Make sure we are inside of the inner-iframe with WaitForPageLoad(browser): browser.execute_script("window.top.location = window.location") - assert "wrapper_nonce" in browser.current_url # We try to use nonce-ed html without iframe - assert "Forbidden" in browser.page_source # Only allow to use nonce once-time + assert "wrapper_nonce" in getContextUrl(browser) # We try to use nonce-ed html without iframe + assert "= 207.5, "Tor version >=0.2.7.5 required, found: %s" % version # Auth cookie file + res_protocol = self.send("PROTOCOLINFO", conn) cookie_match = re.search('COOKIEFILE="(.*?)"', res_protocol) - if cookie_match: - cookie_file = cookie_match.group(1) + + if config.tor_password: + res_auth = self.send('AUTHENTICATE "%s"' % config.tor_password, conn) + elif cookie_match: + cookie_file = cookie_match.group(1).decode("string-escape") auth_hex = binascii.b2a_hex(open(cookie_file, "rb").read()) res_auth = self.send("AUTHENTICATE %s" % auth_hex, conn) else: res_auth = self.send("AUTHENTICATE", conn) assert "250 OK" in res_auth, "Authenticate error %s" % res_auth - self.status = u"Connected (%s)" % res_auth + + # Version 0.2.7.5 required because ADD_ONION support + 
res_version = self.send("GETINFO version", conn) + version = re.search('version=([0-9\.]+)', res_version).group(1) + assert float(version.replace(".", "0", 2)) >= 207.5, "Tor version >=0.2.7.5 required, found: %s" % version + + self.setStatus(u"Connected (%s)" % res_auth) + self.event_started.set(True) + self.starting = False + self.connecting = False self.conn = conn except Exception, err: self.conn = None - self.status = u"Error (%s)" % err - self.log.error("Tor controller connect error: %s" % Debug.formatException(err)) + self.setStatus(u"Error (%s)" % str(err).decode("utf8", "ignore")) + self.log.error(u"Tor controller connect error: %s" % Debug.formatException(str(err).decode("utf8", "ignore"))) self.enabled = False return self.conn @@ -185,25 +229,37 @@ def startOnions(self): if self.enabled: self.log.debug("Start onions") self.start_onions = True + self.getOnion("global") # Get new exit node ip def resetCircuits(self): res = self.request("SIGNAL NEWNYM") if "250 OK" not in res: - self.status = u"Reset circuits error (%s)" % res + self.setStatus(u"Reset circuits error (%s)" % res) self.log.error("Tor reset circuits error: %s" % res) def addOnion(self): + if len(self.privatekeys) >= config.tor_hs_limit: + return random.choice([key for key in self.privatekeys.keys() if key != self.site_onions.get("global")]) + + result = self.makeOnionAndKey() + if result: + onion_address, onion_privatekey = result + self.privatekeys[onion_address] = onion_privatekey + self.setStatus(u"OK (%s onions running)" % len(self.privatekeys)) + SiteManager.peer_blacklist.append((onion_address + ".onion", self.fileserver_port)) + return onion_address + else: + return False + + def makeOnionAndKey(self): res = self.request("ADD_ONION NEW:RSA1024 port=%s" % self.fileserver_port) match = re.search("ServiceID=([A-Za-z0-9]+).*PrivateKey=RSA1024:(.*?)[\r\n]", res, re.DOTALL) if match: onion_address, onion_privatekey = match.groups() - self.privatekeys[onion_address] = onion_privatekey - 
self.status = u"OK (%s onion running)" % len(self.privatekeys) - SiteManager.peer_blacklist.append((onion_address + ".onion", self.fileserver_port)) - return onion_address + return (onion_address, onion_privatekey) else: - self.status = u"AddOnion error (%s)" % res + self.setStatus(u"AddOnion error (%s)" % res) self.log.error("Tor addOnion error: %s" % res) return False @@ -211,10 +267,10 @@ def delOnion(self, address): res = self.request("DEL_ONION %s" % address) if "250 OK" in res: del self.privatekeys[address] - self.status = "OK (%s onion running)" % len(self.privatekeys) + self.setStatus("OK (%s onion running)" % len(self.privatekeys)) return True else: - self.status = u"DelOnion error (%s)" % res + self.setStatus(u"DelOnion error (%s)" % res) self.log.error("Tor delOnion error: %s" % res) self.disconnect() return False @@ -232,10 +288,12 @@ def send(self, cmd, conn=None): if not conn: conn = self.conn self.log.debug("> %s" % cmd) + back = "" for retry in range(2): try: - conn.send("%s\r\n" % cmd) - back = conn.recv(1024 * 64).decode("utf8", "ignore") + conn.sendall("%s\r\n" % cmd) + while not back.endswith("250 OK\r\n"): + back += conn.recv(1024 * 64).decode("utf8", "ignore") break except Exception, err: self.log.error("Tor send error: %s, reconnecting..." 
% err) @@ -243,7 +301,8 @@ def send(self, cmd, conn=None): time.sleep(1) self.connect() back = None - self.log.debug("< %s" % back.strip()) + if back: + self.log.debug("< %s" % back.strip()) return back def getPrivatekey(self, address): @@ -253,29 +312,35 @@ def getPublickey(self, address): return CryptRsa.privatekeyToPublickey(self.privatekeys[address]) def getOnion(self, site_address): - with self.lock: - if not self.enabled: - return None - if self.start_onions: # Different onion for every site - onion = self.site_onions.get(site_address) - else: # Same onion for every site - onion = self.site_onions.get("global") - site_address = "global" - if not onion: + if not self.enabled: + return None + + if config.tor == "always": # Different onion for every site + onion = self.site_onions.get(site_address) + else: # Same onion for every site + onion = self.site_onions.get("global") + site_address = "global" + + if not onion: + with self.lock: self.site_onions[site_address] = self.addOnion() onion = self.site_onions[site_address] self.log.debug("Created new hidden service for %s: %s" % (site_address, onion)) - return onion + return onion + + # Creates and returns a + # socket that has connected to the Tor Network def createSocket(self, onion, port): if not self.enabled: return False - self.log.debug("Creating new socket to %s:%s" % (onion, port)) - if config.tor == "always": # Every socket is proxied by default + self.log.debug("Creating new Tor socket to %s:%s" % (onion, port)) + if self.starting: + self.log.debug("Waiting for startup...") + self.event_started.get() + if config.tor == "always": # Every socket is proxied by default, in this mode sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.connect((onion, int(port))) else: sock = socks.socksocket() sock.set_proxy(socks.SOCKS5, self.proxy_ip, self.proxy_port) - sock.connect((onion, int(port))) return sock diff --git a/src/Translate/Translate.py b/src/Translate/Translate.py new file mode 100644 index 
000000000..90b070b99 --- /dev/null +++ b/src/Translate/Translate.py @@ -0,0 +1,133 @@ +import os +import json +import logging +import inspect +import re +import cgi +import string + +from Config import config + +translates = [] + + +class EscapeProxy(dict): + # Automatically escape the accessed string values + def __getitem__(self, key): + val = dict.__getitem__(self, key) + if type(val) in (str, unicode): + return cgi.escape(val, quote=True) + elif type(val) is dict: + return EscapeProxy(val) + elif type(val) is list: + return EscapeProxy(enumerate(val)) # Convert lists to dict + else: + return val + + +class Translate(dict): + def __init__(self, lang_dir=None, lang=None): + if not lang_dir: + lang_dir = "src/Translate/languages/" + if not lang: + lang = config.language + self.lang = lang + self.lang_dir = lang_dir + self.setLanguage(lang) + self.formatter = string.Formatter() + + if config.debug: + # Auto reload FileRequest on change + from Debug import DebugReloader + DebugReloader(self.load) + + translates.append(self) + + def setLanguage(self, lang): + self.lang = re.sub("[^a-z-]", "", lang) + self.lang_file = self.lang_dir + "%s.json" % lang + self.load() + + def __repr__(self): + return "" % self.lang + + def load(self): + if self.lang == "en": + data = {} + dict.__init__(self, data) + self.clear() + elif os.path.isfile(self.lang_file): + try: + data = json.load(open(self.lang_file)) + logging.debug("Loaded translate file: %s (%s entries)" % (self.lang_file, len(data))) + except Exception as err: + logging.error("Error loading translate file %s: %s" % (self.lang_file, err)) + data = {} + dict.__init__(self, data) + else: + data = {} + dict.__init__(self, data) + self.clear() + logging.debug("Translate file not exists: %s" % self.lang_file) + + def format(self, s, kwargs, nested=False): + kwargs["_"] = self + if nested: + back = self.formatter.vformat(s, [], kwargs) # PY3 TODO: Change to format_map + return self.formatter.vformat(back, [], kwargs) + else: + 
return self.formatter.vformat(s, [], kwargs) + + def formatLocals(self, s, nested=False): + kwargs = inspect.currentframe().f_back.f_locals + return self.format(s, kwargs, nested=nested) + + def __call__(self, s, kwargs=None, nested=False, escape=True): + if not kwargs: + kwargs = inspect.currentframe().f_back.f_locals + if escape: + kwargs = EscapeProxy(kwargs) + return self.format(s, kwargs, nested=nested) + + def __missing__(self, key): + return key + + def pluralize(self, value, single, multi): + if value > 1: + return self[single].format(value) + else: + return self[multi].format(value) + + def translateData(self, data, translate_table=None, mode="js"): + if not translate_table: + translate_table = self + + data = data.decode("utf8") + + patterns = [] + for key, val in translate_table.items(): + if key.startswith("_("): # Problematic string: only match if called between _(" ") function + key = key.replace("_(", "").replace(")", "").replace(", ", '", "') + translate_table[key] = "|" + val + patterns.append(re.escape(key)) + + def replacer(match): + target = translate_table[match.group(1)] + if mode == "js": + if target and target[0] == "|": # Strict string match + if match.string[match.start() - 2] == "_": # Only if the match if called between _(" ") function + return '"' + target[1:] + '"' + else: + return '"' + match.group(1) + '"' + return '"' + target + '"' + else: + return match.group(0)[0] + target + match.group(0)[-1] + + if mode == "html": + pattern = '[">](' + "|".join(patterns) + ')["<]' + else: + pattern = '"(' + "|".join(patterns) + ')"' + data = re.sub(pattern, replacer, data) + return data.encode("utf8") + +translate = Translate() diff --git a/src/Translate/__init__.py b/src/Translate/__init__.py new file mode 100644 index 000000000..40f340636 --- /dev/null +++ b/src/Translate/__init__.py @@ -0,0 +1 @@ +from Translate import * \ No newline at end of file diff --git a/src/Translate/languages/da.json b/src/Translate/languages/da.json new file mode 
100644 index 000000000..8e6f0845e --- /dev/null +++ b/src/Translate/languages/da.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Tillykke, din port ({0}) er åben.
    Du er nu fuld klient på ZeroNet!", + "Tor mode active, every connection using Onion route.": "TOR er aktiv, alle forbindelser anvender Onions.", + "Successfully started Tor onion hidden services.": "OK. Startede TOR skjult onion service.", + "Unable to start hidden services, please check your config.": "Fejl. Kunne ikke starte TOR skjult onion service. Tjek din opsætning!", + "For faster connections open {0} port on your router.": "Åben port {0} på din router for hurtigere forbindelse.", + "Your connection is restricted. Please, open {0} port on your router": "Begrænset forbindelse. Åben venligst port {0} på din router", + "or configure Tor to become a full member of the ZeroNet network.": "eller opsæt TOR for fuld adgang til ZeroNet!", + + "Select account you want to use in this site:": "Vælg bruger til brug på denne side:", + "currently selected": "nuværende bruger", + "Unique to site": "Unik på siden", + + "Content signing failed": "Signering af indhold fejlede", + "Content publish queued for {0:.0f} seconds.": "Indhold i kø for offentliggørelse i {0:.0f} sekunder.", + "Content published to {0} peers.": "Indhold offentliggjort til {0} klienter.", + "No peers found, but your content is ready to access.": "Ingen klienter fundet, men dit indhold er klar til hentning.", + "Your network connection is restricted. Please, open {0} port": "Din forbindelse er begrænset. Åben venligst port {0}", + "on your router to make your site accessible for everyone.": "på din router for at dele din side med alle.", + "Content publish failed.": "Offentliggørelse af indhold fejlede.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Denne fil er endnu ikke delt færdig. 
Tidligere indhold kan gå tabt hvis du skriver til filen nu.", + "Write content anyway": "Del indhold alligevel", + "New certificate added:": "Nyt certifikat oprettet:", + "You current certificate:": "Dit nuværende certifikat: ", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Skift certificat til {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certifikat ændret til {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Side klonet", + + "You have successfully changed the web interface's language!": "OK. Du har nu skiftet sprog på web brugergrænsefladen!", + "Due to the browser's caching, the full transformation could take some minute.": "Pga. browser cache kan skift af sprog tage nogle minutter.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Forbindelse til UiServer Websocket blev tabt. Genopretter forbindelse...", + "Connection with UiServer Websocket recovered.": "Forbindelse til UiServer Websocket genoprettet.", + "UiServer Websocket error, please reload the page.": "UiServer Websocket fejl. 
Genindlæs venligst siden (F5)!", + "   Connecting...": "   Opretter forbindelse...", + "Site size: ": "Side størrelse: ", + "MB is larger than default allowed ": "MB er større end den tilladte default ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Åben side og sæt max side størrelse til \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " filer skal downloades", + " downloaded": " downloadet", + " download failed": " download fejlede", + "Peers found: ": "Klienter fundet: ", + "No peers found": "Ingen klienter fundet", + "Running out of size limit (": "Siden fylder snart for meget (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Ret max side størrelse til \" + site_info.next_size_limit + \"MB", + "Site size limit changed to {0}MB": "Max side størrelse ændret til {0}MB", + " New version of this page has just released.
    Reload to see the modified content.": " Ny version af denne side er blevet offentliggjort.
    Genindlæs venligst siden (F5) for at se nyt indhold!", + "This site requests permission:": "Denne side beder om tilladelse:", + "_(Accept)": "Tillad" + +} diff --git a/src/Translate/languages/de.json b/src/Translate/languages/de.json new file mode 100644 index 000000000..1cc63b744 --- /dev/null +++ b/src/Translate/languages/de.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Gratulation, dein Port {0} ist offen.
    Du bist ein volles Mitglied des ZeroNet Netzwerks!", + "Tor mode active, every connection using Onion route.": "Tor Modus aktiv, jede Verbindung nutzt die Onion Route.", + "Successfully started Tor onion hidden services.": "Tor versteckte Dienste erfolgreich gestartet.", + "Unable to start hidden services, please check your config.": "Nicht möglich versteckte Dienste zu starten.", + "For faster connections open {0} port on your router.": "Für schnellere Verbindungen, öffne Port {0} auf deinem Router.", + "Your connection is restricted. Please, open {0} port on your router": "Deine Verbindung ist eingeschränkt. Bitte öffne Port {0} auf deinem Router", + "or configure Tor to become a full member of the ZeroNet network.": "oder konfiguriere Tor um ein volles Mitglied des ZeroNet Netzwerks zu werden.", + + "Select account you want to use in this site:": "Wähle das Konto, das du auf dieser Seite benutzen willst:", + "currently selected": "aktuell ausgewählt", + "Unique to site": "Eindeutig zur Seite", + + "Content signing failed": "Signierung des Inhalts fehlgeschlagen", + "Content publish queued for {0:.0f} seconds.": "Veröffentlichung des Inhalts um {0:.0f} Sekunden verzögert.", + "Content published to {0} peers.": "Inhalt zu {0} Peers veröffentlicht.", + "No peers found, but your content is ready to access.": "Keine Peers gefunden, aber dein Inhalt ist bereit zum Zugriff.", + "Your network connection is restricted. Please, open {0} port": "Deine Netzwerkverbindung ist beschränkt. Bitte öffne Port {0}", + "on your router to make your site accessible for everyone.": "auf deinem Router um deine Seite für Jeden zugänglich zu machen.", + "Content publish failed.": "Inhalt konnte nicht veröffentlicht werden.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Diese Datei wird noch synchronisiert. 
Wenn jetzt geschrieben wird geht der vorherige Inhalt verloren.", + "Write content anyway": "Inhalt trotzdem schreiben", + "New certificate added:": "Neues Zertifikat hinzugefügt:", + "You current certificate:": "Dein aktuelles Zertifikat:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Ändere es zu {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Zertifikat geändert zu: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Seite geklont", + + "You have successfully changed the web interface's language!": "Du hast die Sprache des Webinterface erfolgreich geändert!", + "Due to the browser's caching, the full transformation could take some minute.": "Aufgrund des Browsercaches kann die volle Transformation Minuten dauern.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Die Verbindung mit UiServer Websocket ist abgebrochen. Neu verbinden...", + "Connection with UiServer Websocket recovered.": "Die Verbindung mit UiServer Websocket wurde wiederhergestellt.", + "UiServer Websocket error, please reload the page.": "UiServer Websocket Fehler, bitte Seite neu laden.", + "   Connecting...": "   Verbinden...", + "Site size: ": "Seitengröße: ", + "MB is larger than default allowed ": "MB ist größer als der erlaubte Standard", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Öffne Seite und setze das Limit auf \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " Dateien müssen noch heruntergeladen werden", + " downloaded": " heruntergeladen", + " download failed": " Herunterladen fehlgeschlagen", + "Peers found: ": "Peers gefunden: ", + "No peers found": "Keine Peers gefunden", + "Running out of size limit (": "Das Speicherlimit ist bald ausgeschöpft (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Limit auf \" + site_info.next_size_limit + \"MB ändern", + "Site size limit changed to {0}MB": "Speicherlimit für diese 
Seite auf {0}MB geändert", + " New version of this page has just released.
    Reload to see the modified content.": " Neue Version dieser Seite wurde gerade veröffentlicht.
    Lade die Seite neu um den geänderten Inhalt zu sehen.", + "This site requests permission:": "Diese Seite fordert Rechte:", + "_(Accept)": "Genehmigen" + +} diff --git a/src/Translate/languages/es.json b/src/Translate/languages/es.json new file mode 100644 index 000000000..4cac077b7 --- /dev/null +++ b/src/Translate/languages/es.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "¡Felicidades! tu puerto {0} está abierto.
    ¡Eres un miembro completo de la red Zeronet!", + "Tor mode active, every connection using Onion route.": "Modo Tor activado, cada conexión usa una ruta Onion.", + "Successfully started Tor onion hidden services.": "Tor ha iniciado satisfactoriamente la ocultación de los servicios onion.", + "Unable to start hidden services, please check your config.": "No se pudo iniciar los servicios ocultos, por favor comprueba tu configuración.", + "For faster connections open {0} port on your router.": "Para conexiones más rápidas abre el puerto {0} en tu router.", + "Your connection is restricted. Please, open {0} port on your router": "Tu conexión está limitada. Por favor, abre el puerto {0} en tu router", + "or configure Tor to become a full member of the ZeroNet network.": "o configura Tor para convertirte en un miembro completo de la red ZeroNet.", + + "Select account you want to use in this site:": "Selecciona la cuenta que quieres utilizar en este sitio:", + "currently selected": "actualmente seleccionada", + "Unique to site": "Única para el sitio", + + "Content signing failed": "Firma del contenido fallida", + "Content publish queued for {0:.0f} seconds.": "Publicación de contenido en cola durante {0:.0f} segundos.", + "Content published to {0} peers.": "Contenido publicado para {0} pares.", + "No peers found, but your content is ready to access.": "No se ha encontrado pares, pero tu contenido está listo para ser accedido.", + "Your network connection is restricted. Please, open {0} port": "Tu conexión de red está restringida. 
Por favor, abre el puerto{0}", + "on your router to make your site accessible for everyone.": "en tu router para hacer tu sitio accesible a todo el mundo.", + "Content publish failed.": "Publicación de contenido fallida.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Este archivo está aún sincronizado, si le escribes ahora el contenido previo podría perderse.", + "Write content anyway": "Escribir el contenido de todas formas", + "New certificate added:": "Nuevo certificado añadido:", + "You current certificate:": "Tu certificado actual:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Cambia esto a {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificado cambiado a: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Sitio clonado", + + "You have successfully changed the web interface's language!": "¡Has cambiado con éxito el idioma de la interfaz web!", + "Due to the browser's caching, the full transformation could take some minute.": "Debido a la caché del navegador, la transformación completa podría llevar unos minutos.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Se perdió la conexión con UiServer Websocket. 
Reconectando...", + "Connection with UiServer Websocket recovered.": "Conexión con UiServer Websocket recuperada.", + "UiServer Websocket error, please reload the page.": "Error de UiServer Websocket, por favor recarga la página.", + "   Connecting...": "   Conectando...", + "Site size: ": "Tamaño del sitio: ", + "MB is larger than default allowed ": "MB es más grande de lo permitido por defecto", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Abre tu sitio y establece el límite de tamaño a \" + site_info.next_size_limit + \"MBs", + " files needs to be downloaded": " Los archivos necesitan ser descargados", + " downloaded": " descargados", + " download failed": " descarga fallida", + "Peers found: ": "Pares encontrados: ", + "No peers found": "No se han encontrado pares", + "Running out of size limit (": "Superando el tamaño límite (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Establece el límite a \" + site_info.next_size_limit + \"MB", + "Site size limit changed to {0}MB": "Límite de tamaño del sitio cambiado a {0}MBs", + " New version of this page has just released.
    Reload to see the modified content.": " Se ha publicado una nueva versión de esta página.
    Recarga para ver el contenido modificado.", + "This site requests permission:": "Este sitio solicita permiso:", + "_(Accept)": "Conceder" + +} diff --git a/src/Translate/languages/fr.json b/src/Translate/languages/fr.json new file mode 100644 index 000000000..b46ef2c38 --- /dev/null +++ b/src/Translate/languages/fr.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Félicitations, le port ({0}) est ouvert.
    Vous êtes maintenant membre de ZeroNet!!", + "Tor mode active, every connection using Onion route.": "Tor actif, toutes les connexions utilisent un routage Onion.", + "Successfully started Tor onion hidden services.": "Tor activé avec succès.", + "Unable to start hidden services, please check your config.": "Impossible d'activer Tor, veuillez vérifier votre configuration.", + "For faster connections open {0} port on your router.": "Pour une meilleure connectivité, ouvrez le port {0} sur votre routeur.", + "Your connection is restricted. Please, open {0} port on your router": "Connectivité limitée. Veuillez ouvrir le port {0} sur votre routeur", + "or configure Tor to become a full member of the ZeroNet network.": "ou configurez Tor afin d'avoir accès aux pairs ZeroNet Onion.", + + "Select account you want to use in this site:": "Sélectionnez le compte que vous voulez utiliser pour ce site:", + "currently selected": "présentement sélectionné", + "Unique to site": "Unique au site", + + "Content signing failed": "Échec à la signature du contenu", + "Content publish queued for {0:.0f} seconds.": "Publication du contenu différée {0:.0f} secondes.", + "Content published to {0} peers.": "Contenu publié à {0} pairs.", + "No peers found, but your content is ready to access.": "Aucun pair trouvé, mais votre contenu est accessible.", + "Your network connection is restricted. Please, open {0} port": "Connectivité limitée. 
Veuillez ouvrir le port {0}", + "on your router to make your site accessible for everyone.": "sur votre routeur pour que votre site soit accessible à tous.", + "Content publish failed.": "Échec de la publication du contenu.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Ce fichier n'est pas à jour, si vous le modifiez maintenant une version antérieure pourrait être perdue.", + "Write content anyway": "Enregistrer quand même", + "New certificate added:": "Nouveau certificat ajouté :", + "You current certificate:": "Votre certificat actuel :", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Changer pour {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificat changé pour : {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Site cloné", + + "You have successfully changed the web interface's language!": "Vous avez modifié la langue d'affichage avec succès!", + "Due to the browser's caching, the full transformation could take some minute.": "En fonction du cache du navigateur, la modification pourrait prendre quelques minutes.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Connexion avec UiServer Websocket rompue. 
Reconnexion...", + "Connection with UiServer Websocket recovered.": "Connexion avec UiServer Websocket rétablie.", + "UiServer Websocket error, please reload the page.": "Erreur du UiServer Websocket, veuillez recharger la page.", + "   Connecting...": "   Connexion...", + "Site size: ": "Taille du site : ", + "MB is larger than default allowed ": "MB est plus large que la taille permise par défaut ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Ouvrez le site et augmentez la taille maximale à \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " fichiers doivent être téléchargés", + " downloaded": " téléchargés", + " download failed": " échec de téléchargement", + "Peers found: ": "Pairs trouvés: ", + "No peers found": "Aucun pair trouvé", + "Running out of size limit (": "Vous approchez la taille maximale (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Augmentez la taille maximale à \" + site_info.next_size_limit + \"MB", + "Site size limit changed to {0}MB": "Taille maximale du site changée à {0}MB", + " New version of this page has just released.
    Reload to see the modified content.": " Une nouvelle version de cette page vient d'être publiée.
    Rechargez pour voir les modifications.", + "This site requests permission:": "Ce site requiert une permission :", + "_(Accept)": "Autoriser" + +} diff --git a/src/Translate/languages/hu.json b/src/Translate/languages/hu.json new file mode 100644 index 000000000..f9487f1d8 --- /dev/null +++ b/src/Translate/languages/hu.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Gratulálunk, a portod ({0}) nyitva van.
    Teljes értékű tagja vagy a hálózatnak!", + "Tor mode active, every connection using Onion route.": "Tor mód aktív, minden kapcsolat az Onion hálózaton keresztül történik.", + "Successfully started Tor onion hidden services.": "Sikeresen elindultak a Tor onion titkos szolgáltatások.", + "Unable to start hidden services, please check your config.": "Nem sikerült elindítani a Tor onion szolgáltatásokat. Kérjük, ellenőrizd a beállításokat!", + "For faster connections open {0} port on your router.": "A gyorsabb kapcsolatok érdekében nyisd ki a {0} portot a routereden.", + "Your connection is restricted. Please, open {0} port on your router": "A kapcsolatod korlátozott. Kérjük, nyisd ki a {0} portot a routereden", + "or configure Tor to become a full member of the ZeroNet network.": "vagy állítsd be a Tor kliensed, hogy teljes értékű tagja legyél a hálózatnak!", + + "Select account you want to use in this site:": "Válaszd ki az oldalhoz használt felhasználónevet:", + "currently selected": "jelenleg kijelölt", + "Unique to site": "Egyedi az oldalon", + + "Content signing failed": "Tartalom aláírása sikeretelen", + "Content publish queued for {0:.0f} seconds.": "Tartalom publikálása elhalasztva {0:.0f} másodperccel.", + "Content published to {0} peers.": "Tartalom publikálva {0} fél részére.", + "No peers found, but your content is ready to access.": "Aktív csatlakozási pont nem található, de a tartalmad készen áll a kiszolgálásra.", + "Your network connection is restricted. Please, open {0} port": "A kapcsolatod korlátozott. 
Kérjük, nyisd ki a {0} portot", + "on your router to make your site accessible for everyone.": "a routereden, hogy az oldalad mindenki számára elérhető legyen.", + "Content publish failed.": "Sikertelen tartalom publikálás.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Ez a fájl még letöltés alatt van, ha most felülírod a korábbi tartalma elveszhet.", + "Write content anyway": "Felülírás", + "New certificate added:": "Új tanúsítvány hozzáadva:", + "You current certificate:": "A jelenlegi tanúsítványod: ", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Változtatás {auth_type}/{auth_user_name}@{domain}-ra", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "A tanúsítvány megváltozott {auth_type}/{auth_user_name}@{domain}-ra.", + "Site cloned": "Az oldal klónozva", + + "You have successfully changed the web interface's language!": "Sikeresen átállítottad a web felület nyelvét!", + "Due to the browser's caching, the full transformation could take some minute.": "A böngésző cache-elése miatt egy pár percig eltarthat a teljes átállás.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Az UiServer Websocket kapcsolat megszakadt. 
Újracsatlakozás...", + "Connection with UiServer Websocket recovered.": "Az UiServer Websocket kapcsolat visszaállt.", + "UiServer Websocket error, please reload the page.": "UiServer Websocket hiba, töltsd újra az oldalt!", + "   Connecting...": "   Csatlakozás...", + "Site size: ": "Oldal mérete: ", + "MB is larger than default allowed ": "MB nagyobb, mint az engedélyezett ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Az oldal megnyitása és a korlát módosítása \" + site_info.next_size_limit + \"MB-ra", + " files needs to be downloaded": " fájlt kell letölteni", + " downloaded": " letöltve", + " download failed": " letöltés sikertelen", + "Peers found: ": "Talált csatlakozási pontok: ", + "No peers found": "Nincs csatlakozási pont", + "Running out of size limit (": "Az oldal hamarosan eléri a méretkorlátot (", + "Set limit to \" + site_info.next_size_limit + \"MB": "A korlát módosítása \" + site_info.next_size_limit + \"MB-ra", + "Site size limit changed to {0}MB": "A méretkorlát módosítva {0}MB-ra", + " New version of this page has just released.
    Reload to see the modified content.": "Az oldal épp most módosult
    A megváltozott tartalomért töltsd újra!", + "This site requests permission:": "Az oldal megtekintéséhez szükséges jog:", + "_(Accept)": "Engedélyezés" + +} diff --git a/src/Translate/languages/it.json b/src/Translate/languages/it.json new file mode 100644 index 000000000..f3ee5d87a --- /dev/null +++ b/src/Translate/languages/it.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Congratulazioni, la tua porta {0} è aperta.
    Ora sei un membro effettivo della rete ZeroNet!", + "Tor mode active, every connection using Onion route.": "Modalità Tor attiva, ogni connessione sta usando la rete Onion.", + "Successfully started Tor onion hidden services.": "Servizi Tor onion nascosti avviati con successo.", + "Unable to start hidden services, please check your config.": "Impossibile avviare i servizi nascosti. Si prega di controllare la propria configurazione!", + "For faster connections open {0} port on your router.": "Per avere connessioni più veloci aprire la porta {0} sul router.", + "Your connection is restricted. Please, open {0} port on your router": "La tua connessione è limitata. Aprire la porta {0} sul router", + "or configure Tor to become a full member of the ZeroNet network.": "o configurare Tor per diventare membro effettivo della rete ZeroNet!", + + "Select account you want to use in this site:": "Seleziona l'account che vuoi utilizzare per questo sito:", + "currently selected": "attualmente selezionato", + "Unique to site": "Unico sul sito", + + "Content signing failed": "Firma contenuti fallita", + "Content publish queued for {0:.0f} seconds.": "Pubblicazione contenuti in coda per {0:.0f} secondi.", + "Content published to {0} peers.": "Contenuti pubblicati su {0} peer.", + "No peers found, but your content is ready to access.": "Nessun peer trovato, ma i tuoi contenuti sono pronti per l'accesso.", + "Your network connection is restricted. Please, open {0} port": "La tua connessione di rete è limitata. 
Aprire la porta {0} ", + "on your router to make your site accessible for everyone.": "sul router, per rendere il sito accessibile a chiunque.", + "Content publish failed.": "Pubblicazione contenuti fallita.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Questo file è ancora in sincronizzazione, se viene modificato i contenuti precedenti andranno persi.", + "Write content anyway": "Scrivere comunque i contenuti", + "New certificate added:": "Aggiunto nuovo certificato:", + "You current certificate:": "Il tuo attuale certificato:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Cambiarlo in {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificato cambiato in: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Sito clonato", + + "You have successfully changed the web interface's language!": "Hai cambiato con successo la lingua dell'interfaccia web!", + "Due to the browser's caching, the full transformation could take some minute.": "La trasformazione completa potrebbe richiedere alcuni minuti a causa della cache del browser.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "La connessione con UiServer Websocket è andata persa. 
Riconnessione...", + "Connection with UiServer Websocket recovered.": "Connessione con UiServer Websocket recuperata.", + "UiServer Websocket error, please reload the page.": "Errore UiServer Websocket, ricaricare la pagina!", + "   Connecting...": "   Connessione...", + "Site size: ": "Dimensione del sito: ", + "MB is larger than default allowed ": "MB è più grande del valore predefinito consentito ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Aprire il sito e impostare la dimensione limite a \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " i file devono essere scaricati", + " downloaded": " scaricati", + " download failed": " scaricamento fallito", + "Peers found: ": "Peers trovati: ", + "No peers found": "Nessun peer trovato", + "Running out of size limit (": "Superato il limite di spazio (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Imposta il limite a \" + site_info.next_size_limit + \"MB", + "Site size limit changed to {0}MB": "Limite di spazio cambiato a {0}MB", + " New version of this page has just released.
    Reload to see the modified content.": "E' stata rilasciata una nuova versione di questa pagina
    Ricaricare per vedere il contenuto modificato!", + "This site requests permission:": "Questo sito richiede permessi:", + "_(Accept)": "Concedere" + +} diff --git a/src/Translate/languages/jp.json b/src/Translate/languages/jp.json new file mode 100644 index 000000000..9978acc70 --- /dev/null +++ b/src/Translate/languages/jp.json @@ -0,0 +1,82 @@ +{ + "Peers": "ピア", + "Connected": "接続済み", + "Connectable": "利用可能", + "Connectable peers": "ピアに接続可能", + + "Data transfer": "データ転送", + "Received": "受信", + "Received bytes": "受信バイト数", + "Sent": "送信", + "Sent bytes": "送信バイト数", + + "Files": "ファイル", + "Total": "合計", + "Image": "画像", + "Other": "その他", + "User data": "ユーザーデータ", + + "Size limit": "サイズ制限", + "limit used": "使用上限", + "free space": "フリースペース", + "Set": "セット", + + "Optional files": "オプション ファイル", + "Downloaded": "ダウンロード済み", + "Download and help distribute all files": "ダウンロードしてすべてのファイルの配布を支援する", + "Total size": "合計サイズ", + "Downloaded files": "ダウンロードされたファイル", + + "Database": "データベース", + "search feeds": "フィードを検索する", + "{feeds} query": "{feeds} お問い合わせ", + "Reload": "再読込", + "Rebuild": "再ビルド", + "No database found": "データベースが見つかりません", + + "Identity address": "Identity address", + "Change": "編集", + + "Site control": "サイト管理", + "Update": "更新", + "Pause": "一時停止", + "Resume": "再開", + "Delete": "削除", + "Are you sure?": "本当によろしいですか?", + + "Site address": "サイトアドレス", + "Donate": "寄付する", + + "Missing files": "ファイルがありません", + "{} try": "{} 試す", + "{} tries": "{} 試行", + "+ {num_bad_files} more": "+ {num_bad_files} more", + + "This is my site": "This is my site", + "Site title": "サイトタイトル", + "Site description": "サイトの説明", + "Save site settings": "サイトの設定を保存する", + + "Content publishing": "コンテンツを公開する", + "Choose": "選択", + "Sign": "Sign", + "Publish": "公開する", + + "This function is disabled on this proxy": "この機能はこのプロキシで無効になっています", + "GeoLite2 City database download error: {}!
    Please download manually and unpack to data dir:
    {}": "GeoLite2 Cityデータベースのダウンロードエラー: {}!
    手動でダウンロードして、フォルダに解凍してください。:
    {}", + "Downloading GeoLite2 City database (one time only, ~20MB)...": "GeoLite2 Cityデータベースの読み込み (これは一度だけ行われます, ~20MB)...", + "GeoLite2 City database downloaded!": "GeoLite2 Cityデータベースがダウンロードされました!", + + "Are you sure?": "本当によろしいですか?", + "Site storage limit modified!": "サイトの保存容量の制限が変更されました!", + "Database schema reloaded!": "データベーススキーマがリロードされました!", + "Database rebuilding....": "データベースの再構築中....", + "Database rebuilt!": "データベースが再構築されました!", + "Site updated!": "サイトが更新されました!", + "Delete this site": "このサイトを削除する", + "File write error: ": "ファイル書き込みエラー:", + "Site settings saved!": "サイト設定が保存されました!", + "Enter your private key:": "秘密鍵を入力してください:", + " Signed!": " Signed!", + "WebGL not supported": "WebGLはサポートされていません" +} \ No newline at end of file diff --git a/src/Translate/languages/nl.json b/src/Translate/languages/nl.json new file mode 100644 index 000000000..985cce7a3 --- /dev/null +++ b/src/Translate/languages/nl.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Gefeliciteerd, je poort {0} is geopend.
    Je bent een volledig lid van het ZeroNet netwerk!", + "Tor mode active, every connection using Onion route.": "Tor modus actief, elke verbinding gebruikt een Onion route.", + "Successfully started Tor onion hidden services.": "Tor onion verborgen diensten zijn met succes gestart.", + "Unable to start hidden services, please check your config.": "Het was niet mogelijk om verborgen diensten te starten, controleer je configuratie.", + "For faster connections open {0} port on your router.": "Voor snellere verbindingen open je de poort {0} op je router.", + "Your connection is restricted. Please, open {0} port on your router": "Je verbinding is beperkt. Open altjeblieft poort {0} op je router", + "or configure Tor to become a full member of the ZeroNet network.": "of configureer Tor om een volledig lid van het ZeroNet netwerk te worden.", + + "Select account you want to use in this site:": "Selecteer het account die je wilt gebruiken binnen deze site:", + "currently selected": "huidige selectie", + "Unique to site": "Uniek voor deze site", + + "Content signing failed": "Inhoud ondertekenen mislukt", + "Content publish queued for {0:.0f} seconds.": "Publiceren van inhoud staat in de wachtrij voor {0:.0f} seconden.", + "Content published to {0} peers.": "Inhoud is gepubliceerd naar {0} peers", + "No peers found, but your content is ready to access.": "Geen peers gevonden, maar je inhoud is klaar voor toegang.", + "Your network connection is restricted. Please, open {0} port": "Je netwerkverbinding is beperkt. 
Open alsjeblieft poort {0}", + "on your router to make your site accessible for everyone.": "op je router om je site toegankelijk te maken voor iedereen.", + "Content publish failed.": "Inhoud publicatie mislukt.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Dit bestand is nog in sync, als je het nu overschrijft, dan is mogelijk de vorige inhoud verloren.", + "Write content anyway": "Inhoud toch schrijven", + "New certificate added:": "Nieuw certificaat toegevoegd:", + "You current certificate:": "Je huidige certificaat:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Verander het naar {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificaat veranderd naar: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Site gecloned", + + "You have successfully changed the web interface's language!": "Je hebt met succes de taal van de web interface aangepast!", + "Due to the browser's caching, the full transformation could take some minute.": "Door caching van je browser kan de volledige transformatie enkele minuten duren.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Verbinding met UiServer Websocket verbroken. 
Opnieuw verbinden...", + "Connection with UiServer Websocket recovered.": "Verbinding met UiServer Websocket hersteld.", + "UiServer Websocket error, please reload the page.": "UiServer Websocket fout, herlaad alsjeblieft de pagina.", + "   Connecting...": "   Verbinden...", + "Site size: ": "Site grootte ", + "MB is larger than default allowed ": "MB is groter dan de standaard toegestaan ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Open de site en stel de limeit op de grootte in op \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " bestanden moeten worden gedownload", + " downloaded": " gedownload", + " download failed": " download mislukt", + "Peers found: ": "Peers gevonden: ", + "No peers found": "Geen peers gevonden", + "Running out of size limit (": "Limeit op grootte bereikt (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Stel limiet in op \" + site_info.next_size_limit + \"MB", + "Site size limit changed to {0}MB": "Site limiet op grootte is veranderd naar {0}MB", + " New version of this page has just released.
    Reload to see the modified content.": " Een nieuwe versie van deze pagina is zojuist uitgekomen.
    Herlaad de pagina om de bijgewerkte inhoud te zien.", + "This site requests permission:": "Deze site vraagt om permissie:", + "_(Accept)": "Toekennen" + +} diff --git a/src/Translate/languages/pl.json b/src/Translate/languages/pl.json new file mode 100644 index 000000000..75caeceb5 --- /dev/null +++ b/src/Translate/languages/pl.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Gratulacje, twój port {0} jest otwarty.
    Jesteś pełnoprawnym użytkownikiem sieci ZeroNet!", + "Tor mode active, every connection using Onion route.": "Tryb Tor aktywny, każde połączenie przy użyciu trasy Cebulowej.", + "Successfully started Tor onion hidden services.": "Pomyślnie zainicjowano ukryte usługi cebulowe Tor.", + "Unable to start hidden services, please check your config.": "Niezdolny do uruchomienia ukrytych usług, proszę sprawdź swoją konfigurację.", + "For faster connections open {0} port on your router.": "Dla szybszego połączenia otwórz {0} port w swoim routerze.", + "Your connection is restricted. Please, open {0} port on your router": "Połączenie jest ograniczone. Proszę, otwórz port {0} w swoim routerze", + "or configure Tor to become a full member of the ZeroNet network.": "bądź skonfiguruj Tora by stać się pełnoprawnym użytkownikiem sieci ZeroNet.", + + "Select account you want to use in this site:": "Wybierz konto którego chcesz użyć na tej stronie:", + "currently selected": "aktualnie wybrany", + "Unique to site": "Unikatowy dla strony", + + "Content signing failed": "Podpisanie treści zawiodło", + "Content publish queued for {0:.0f} seconds.": "Publikacja treści wstrzymana na {0:.0f} sekund(y).", + "Content published to {0} peers.": "Treść opublikowana do {0} uzytkowników równorzednych.", + "No peers found, but your content is ready to access.": "Nie odnaleziono użytkowników równorzędnych, ale twoja treść jest dostępna.", + "Your network connection is restricted. Please, open {0} port": "Twoje połączenie sieciowe jest ograniczone. 
Proszę, otwórz port {0}", + "on your router to make your site accessible for everyone.": "w swoim routerze, by twoja strona mogła być dostępna dla wszystkich.", + "Content publish failed.": "Publikacja treści zawiodła.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Ten plik wciąż się synchronizuje, jeśli zapiszesz go teraz, poprzednia treść może zostać utracona.", + "Write content anyway": "Zapisz treść mimo wszystko", + "New certificate added:": "Nowy certyfikat dodany:", + "You current certificate:": "Twój aktualny certyfikat: ", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Zmień na {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certyfikat zmieniony na {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Strona sklonowana", + + "You have successfully changed the web interface's language!": "Pomyślnie zmieniono język interfejsu stron!", + "Due to the browser's caching, the full transformation could take some minute.": "Ze względu na buforowanie przeglądarki, pełna zmiana może zająć parę minut.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Połączenie z UiServer Websocket zostało przerwane. 
Ponowne łączenie...", + "Connection with UiServer Websocket recovered.": "Połączenie z UiServer Websocket przywrócone.", + "UiServer Websocket error, please reload the page.": "Błąd UiServer Websocket, prosze odświeżyć stronę.", + "   Connecting...": "   Łączenie...", + "Site size: ": "Rozmiar strony: ", + "MB is larger than default allowed ": "MB jest większy niż domyślnie dozwolony ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Otwórz stronę i ustaw limit na \" + site_info.next_size_limit + \"MBów", + " files needs to be downloaded": " pliki muszą zostać ściągnięte", + " downloaded": " ściągnięte", + " download failed": " ściąganie nie powiodło się", + "Peers found: ": "Odnaleziono użytkowników równorzednych: ", + "No peers found": "Nie odnaleziono użytkowników równorzędnych", + "Running out of size limit (": "Limit rozmiaru na wyczerpaniu (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Ustaw limit na \" + site_info.next_size_limit + \"MBów", + "Site size limit changed to {0}MB": "Rozmiar limitu strony zmieniony na {0}MBów", + " New version of this page has just released.
    Reload to see the modified content.": "Nowa wersja tej strony właśnie została wydana.
    Odśwież by zobaczyć nową, zmodyfikowaną treść strony.", + "This site requests permission:": "Ta strona wymaga uprawnień:", + "_(Accept)": "Przyznaj uprawnienia" + +} diff --git a/src/Translate/languages/pt-br.json b/src/Translate/languages/pt-br.json new file mode 100644 index 000000000..a842684f6 --- /dev/null +++ b/src/Translate/languages/pt-br.json @@ -0,0 +1,57 @@ +{ + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Parabéns, a porta{0} está aberta.
    Você é um membro completo da rede ZeroNet!", + "Tor mode active, every connection using Onion route.": "Modo Tor ativado, todas as conexões usam a rota Onion.", + "Successfully started Tor onion hidden services.": "Os serviços ocultos Tor onion foram inciados com sucesso.", + "Unable to start hidden services, please check your config.": "Não foi possível iniciar os serviços ocultos, por favor verifique suas configurações.", + "For faster connections open {0} port on your router.": "Para conexões mais rápidas, abra a porta {0} em seu roteador.", + "Your connection is restricted. Please, open {0} port on your router": "Sua conexão está restrita. Por favor, abra a porta {0} em seu roteador", + "or configure Tor to become a full member of the ZeroNet network.": "ou configure o Tor para se tornar um membro completo da rede ZeroNet.", + + "Select account you want to use in this site:": "Selecione a conta que deseja usar nesse site:", + "currently selected": "atualmente selecionada", + "Unique to site": "Única para o site", + + "Content signing failed": "Assinatura de conteúdo falhou", + "Content publish queued for {0:.0f} seconds.": "Publicação de conteúdo na fila por {0:.0f} segundos.", + "Content published to {0} peers.": "Conteúdo publicado para {0} peers.", + "No peers found, but your content is ready to access.": "Nenhum peer encontrado, mas seu conteúdo está pronto para ser acessado.", + "Your network connection is restricted. Please, open {0} port": "Sua conexão de rede está restrita. 
Por favor, abra a porta {0}", + "on your router to make your site accessible for everyone.": "em seu roteador para tornar seu site acessível para todos.", + "Content publish failed.": "Publicação de conteúdo falhou.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Esse arquivo ainda está sincronizado, se escreve-lo agora o conteúdo anterior poderá ser perdido.", + "Write content anyway": "Escrever o conteúdo mesmo assim", + "New certificate added:": "Novo certificado adicionado:", + "You current certificate:": "Seu certificado atual:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Alterar para {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certificado alterado para: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Site clonado", + + "You have successfully changed the web interface's language!": "Você alterou o idioma da interface web com sucesso!", + "Due to the browser's caching, the full transformation could take some minute.": "Devido ao cache do navegador, a transformação completa pode levar alguns minutos.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "A conexão com UiServer Websocket foi perdida. 
Reconectando...", + "Connection with UiServer Websocket recovered.": "Conexão com UiServer Websocket recuperada.", + "UiServer Websocket error, please reload the page.": "Erro de UiServer Websocket, por favor atualize a página.", + "   Connecting...": "   Conectando...", + "Site size: ": "Tamanho do site: ", + "MB is larger than default allowed ": "MB é maior do que o tamanho permitido por padrão", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Abrir site e definir limite de tamanho para \" + site_info.next_size_limit + \"MBs", + " files needs to be downloaded": " os arquivos precisam ser baixados", + " downloaded": " baixados", + " download failed": " falha no download", + "Peers found: ": "Peers encontrados: ", + "No peers found": "Nenhum peer encontrado", + "Running out of size limit (": "Passando do tamanho limite (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Definir limite para \" + site_info.next_size_limit + \"MB", + "Site size limit changed to {0}MB": "Limite de tamanho do site alterado para {0}MBs", + " New version of this page has just released.
    Reload to see the modified content.": " Uma nova versão desse site acaba de ser publicada.
    Atualize para ver o conteúdo modificado.", + "This site requests permission:": "Esse site solicita permissão:", + "_(Accept)": "Conceder", + + "Save": "Salvar", + "Trackers announcing": "Trackers anunciando", + "Error": "Erro", + "Done": "Concluído", + "Tracker connection error detected.": "Erro de conexão com tracker foi detectado." + +} diff --git a/src/Translate/languages/ru.json b/src/Translate/languages/ru.json new file mode 100644 index 000000000..96c84b91f --- /dev/null +++ b/src/Translate/languages/ru.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Поздравляем, ваш порт {0} открыт.
    Вы полноценный участник сети ZeroNet!", + "Tor mode active, every connection using Onion route.": "Режим Tor включен, все соединения осуществляются через Tor.", + "Successfully started Tor onion hidden services.": "Скрытый сервис Tor запущено успешно.", + "Unable to start hidden services, please check your config.": "Ошибка при запуске скрытого сервиса, пожалуйста проверьте настройки", + "For faster connections open {0} port on your router.": "Для более быстрой работы сети откройте {0} порт на вашем роутере.", + "Your connection is restricted. Please, open {0} port on your router": "Подключение ограничено. Пожалуйста откройте {0} порт на вашем роутере", + "or configure Tor to become a full member of the ZeroNet network.": "или настройте Tor что бы стать полноценным участником сети ZeroNet.", + + "Select account you want to use in this site:": "Выберите аккаунт для использования на этом сайте:", + "currently selected": "сейчас выбран", + "Unique to site": "Уникальный для этого сайта", + + "Content signing failed": "Подпись контента не удалась", + "Content publish queued for {0:.0f} seconds.": "Публикация контента поставлена в очередь {0:.0f} секунд.", + "Content published to {0} peers.": "Контент опубликован на {0} пирах.", + "No peers found, but your content is ready to access.": "Пиры не найдены, но ваш контент доступен.", + "Your network connection is restricted. Please, open {0} port": "Ваше подключение ограничено. Пожалуйста откройте {0} порт. 
", + "on your router to make your site accessible for everyone.": "на вашем роутере, что бы ваш сайт стал доступнг посетителям.", + "Content publish failed.": "Ошибка при публикации контента.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Этот файл всё еще синхронизируется, если продолжить его изменение, предыдущий контент может быть потерян.", + "Write content anyway": "Записать контент в любом случае", + "New certificate added:": "Добавлен новый сертификат:", + "You current certificate:": "Ваш текущий сертификат: ", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Изменить его на {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Сертификат изменен на: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Сайт склонирован", + + "You have successfully changed the web interface's language!": "Язык интерфейса успешно изменен!", + "Due to the browser's caching, the full transformation could take some minute.": "В зависимости от работы вашего браузера полное преобразование может занять пару минут.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Подключение к UiServer Websocket прервано. 
Переподключаюсь...", + "Connection with UiServer Websocket recovered.": "Подключение к UiServer Websocket восстановлено.", + "UiServer Websocket error, please reload the page.": "Ошибка UiServer Websocket, перезагрузите страницу!", + "   Connecting...": "   Подключение...", + "Site size: ": "Размер сайта: ", + "MB is larger than default allowed ": "MB больше чем разрешено по умолчанию ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Открыть сайт и установить лимит занимаемого места на \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " файлы должны быть загружены", + " downloaded": " загружено", + " download failed": " ошибка загрузки", + "Peers found: ": "Пиров найдено: ", + "No peers found": "Пиры не найдены", + "Running out of size limit (": "Доступное место закончилось (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Установить лимит на \" + site_info.next_size_limit + \"MB", + "Site size limit changed to {0}MB": "Лимит памяти на диске изменен на {0}MB", + " New version of this page has just released.
    Reload to see the modified content.": "Доступна новая версия данной страницы
    Обновите страницу, что бы увидеть изменения!", + "This site requests permission:": "Данный сайт запрашивает разрешения:", + "_(Accept)": "Предоставить" + +} diff --git a/src/Translate/languages/sk.json b/src/Translate/languages/sk.json new file mode 100644 index 000000000..8fb4554bd --- /dev/null +++ b/src/Translate/languages/sk.json @@ -0,0 +1,57 @@ +{ + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Blahoželáme, váš port {0} je otvorený.
    Ste úplným členom siete ZeroNet!", + "Tor mode active, every connection using Onion route.": "Tor mód aktívny, všetky spojenia teraz používajú Onion sieť.", + "Successfully started Tor onion hidden services.": "Tor úspešne spustený.", + "Unable to start hidden services, please check your config.": "Nebolo možné spustiť Tor, prosím skontrolujte nastavenia.", + "For faster connections open {0} port on your router.": "Pre rýchlejšie spojenie otvorte na vašom routery port {0}", + "Your connection is restricted. Please, open {0} port on your router": "Vaše pripojenie je obmedzené. Prosím otvorte port {0} na vašom routery.", + "or configure Tor to become a full member of the ZeroNet network.": "alebo nastavte Tor aby ste sa tali plným členom siete ZeroNet.", + + "Select account you want to use in this site:": "Zvoľte účet ktorý chcete používať na tejto stránke:", + "currently selected": "aktuálne zvolené", + "Unique to site": "Unikátny pre stránku", + + "Content signing failed": "Podpísanie obsahu zlyhalo", + "Content publish queued for {0:.0f} seconds.": "Podpísanie obsahu bude na rade za {0:.0f} sekúnd", + "Content published to {0} peers.": "Obsah publikovaný {0} peer-erom", + "No peers found, but your content is ready to access.": "Neboli nájdený žiadny peer-ery, ale váš obsah je pripravený pre prístup.", + "Your network connection is restricted. Please, open {0} port": "Vaše pripojenie k sieti je obmedzené. 
Prosím otvorte port {0} na vašom routery.", + "on your router to make your site accessible for everyone.": "na vašom routery aby bola vaša stránka prístupná pre všetkých.", + "Content publish failed.": "Publikovanie obsahu zlyhalo.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Tento súbor sa stále synchronizuje, ak v ňom spravíte zmeny, predchádzajúci obsah sa môže stratiť.", + "Write content anyway": "Aj tak spraviť zmeny", + "New certificate added:": "Pridaný nový certifikát:", + "You current certificate:": "Váš aktuálny certifikát:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Zvoľte to na {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Certifikát zmenený na: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Stránka naklonovaná", + + "You have successfully changed the web interface's language!": "Úspešne ste zmenili jazyk webového rozhrania!", + "Due to the browser's caching, the full transformation could take some minute.": "Kôli cachu webového prehliadavača, ceľková transformácia môže chvíĺu trvať.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Spojenie s UiServer Websocket bolo stratené. 
Znovu pripájame...", + "Connection with UiServer Websocket recovered.": "Spojenie s UiServer Websocket obnovené.", + "UiServer Websocket error, please reload the page.": "Chyba UiServer Websocket-u, prosím znovu načítajte stránku.", + "   Connecting...": "   Pripájanie...", + "Site size: ": "Veľkosť stránky: ", + "MB is larger than default allowed ": "MB je viac ako povolená hodnota", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Otvoriť stránku a nastaviť limit veľkosti na \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " súbory je potrebné stiahnuť", + " downloaded": " stiahnuté", + " download failed": " sťahovanie zlyhalo", + "Peers found: ": "Peer-erov nájdených: ", + "No peers found": "Neboli nájdený žiadny peer-ery", + "Running out of size limit (": "Presahuje povolený limit veľkosti pamäte (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Nastaviť limit na \" + site_info.next_size_limit + \"MB", + "Site size limit changed to {0}MB": "Limit veľkosti pamäte nastavený na {0}MB", + " New version of this page has just released.
    Reload to see the modified content.": " Bola vydaná nová verzia tejto stránky.
    Znovu načítajte túto stránku aby bolo vidieť zmeny.", + "This site requests permission:": "Táto stránka vyžaduje povolenie:", + "_(Accept)": "Udeliť", + + "on": "", + "Oct": "Okt", + "May": "Máj", + "Jun": "Jún", + "Jul": "Júl" + +} diff --git a/src/Translate/languages/sl.json b/src/Translate/languages/sl.json new file mode 100644 index 000000000..2aeb628e9 --- /dev/null +++ b/src/Translate/languages/sl.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Čestitke, vaša vrata {0} so odprta.
    Postali ste polnopravni član ZeroNet omrežja!", + "Tor mode active, every connection using Onion route.": "Način Tor aktiven.", + "Successfully started Tor onion hidden services.": "Storitve Tor uspešno zagnane.", + "Unable to start hidden services, please check your config.": "Ni bilo mogoče zagnati Tor storitev. Preverite nastavitve.", + "For faster connections open {0} port on your router.": "Za hitrejše povezave na svojem usmerjevalniku odprite vrata {0}.", + "Your connection is restricted. Please, open {0} port on your router": "Vaša povezava je omejena. Na svojem usmerjevalniku odprite vrata {0}", + "or configure Tor to become a full member of the ZeroNet network.": "ali nastavite Tor, da postanete polnopravni član ZeroNet omrežja.", + + "Select account you want to use in this site:": "Izberite račun, ki ga želite uporabiti na tem spletnem mestu:", + "currently selected": "trenutno izbrano", + "Unique to site": "Edinstven za spletno mesto", + + "Content signing failed": "Podpisovanje vsebine ni uspelo", + "Content publish queued for {0:.0f} seconds.": "Objava vsebine na čakanju za {0:.0f} sekund.", + "Content published to {0} peers.": "Vsebina objavljena na {0} povezavah.", + "No peers found, but your content is ready to access.": "Ni nobenih povezav, vendar je vaša vsebina pripravljena za dostop.", + "Your network connection is restricted. Please, open {0} port": "Vaša povezava je omejena. Prosimo, odprite vrata {0}", + "on your router to make your site accessible for everyone.": "na vašem usmerjevalniku, da bo vaše spletno mesto dostopno za vse.", + "Content publish failed.": "Objavljanje vsebine ni uspelo.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Ta datoteka se še vedno sinhronizira. 
Če jo uredite zdaj, se lahko zgodi, da bo prejšnja vsebina izgubljena.", + "Write content anyway": "Vseeno uredi vsebino", + "New certificate added:": "Dodano novo potrdilo:", + "You current certificate:": "Trenutno potrdilo:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "Spremenite ga na {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "Potrdilo spremenjeno na: {auth_type}/{auth_user_name}@{domain}.", + "Site cloned": "Stran klonirana", + + "You have successfully changed the web interface's language!": "Uspešno ste spremenili jezik spletnega vmesnika!", + "Due to the browser's caching, the full transformation could take some minute.": "Zaradi predpomnjenja brskalnika lahko popolna preobrazba traja nekaj minut.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "Povezava z UiServer Websocket je bila izgubljena. Ponovno povezovanje ...", + "Connection with UiServer Websocket recovered.": "Povezava z UiServer Websocket je vzpostavljena.", + "UiServer Websocket error, please reload the page.": "Napaka UiServer Websocket. Prosimo osvežite stran.", + "   Connecting...": "   Povezovanje ...", + "Site size: ": "Velikost strani: ", + "MB is larger than default allowed ": "MB je večja od dovoljenih", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Odpri to stran in nastavi omejitev na \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " datotek mora biti prenešenih", + " downloaded": " preneseno", + " download failed": " prenos ni uspel", + "Peers found: ": "Najdene povezave: ", + "No peers found": "Ni najdenih povezav", + "Running out of size limit (": "Zmanjkuje dovoljenega prostora (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Nastavi omejitev na \" + site_info.next_size_limit + \"MB", + "Site size limit changed to {0}MB": "Omejitev strani nastavljena na{0} MB", + " New version of this page has just released.
    Reload to see the modified content.": " Ravnokar je bila objavljena nova različica te strani.
    Osvežite jo, da boste videli novo vsebino.", + "This site requests permission:": "Ta stran zahteva dovoljenja:", + "_(Accept)": "Dovoli" + +} diff --git a/src/Translate/languages/tr.json b/src/Translate/languages/tr.json new file mode 100644 index 000000000..09a1bdb5c --- /dev/null +++ b/src/Translate/languages/tr.json @@ -0,0 +1,51 @@ +{ + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "Tebrikler, portunuz ({0}) açık.
    Artık ZeroNet ağına katıldınız!", + "Tor mode active, every connection using Onion route.": "Tor aktif, tüm bağlantılar Onion yönlendiricisini kullanıyor.", + "Successfully started Tor onion hidden services.": "Gizli Tor hizmetleri başlatıldı.", + "Unable to start hidden services, please check your config.": "Gizli hizmetler başlatılamadı, lütfen ayarlarınızı kontrol ediniz.", + "For faster connections open {0} port on your router.": "Daha hızlı bağlantı için {0} nolu portu bilgisayarınıza yönlendirin.", + "Your connection is restricted. Please, open {0} port on your router": "Sınırlı bağlantı. Lütfen, {0} nolu portu bilgisayarınıza yönlendirin", + "or configure Tor to become a full member of the ZeroNet network.": "ya da ZeroNet ağına tam olarak katılabilmek için Tor'u kullanın.", + + "Select account you want to use in this site:": "Bu sitede kullanmak için bir hesap seçiniz:", + "currently selected": "kullanılan", + "Unique to site": "Bu site için benzersiz", + + "Content signing failed": "İçerik imzalama başarısız oldu", + "Content publish queued for {0:.0f} seconds.": "İçerik yayımlanmak üzere {0:.0f} saniyedir kuyrukta.", + "Content published to {0} peers.": "İçerik {0} eşe dağıtıldı.", + "No peers found, but your content is ready to access.": "Eş bulunamadı, ama içeriğiniz erişime hazır.", + "Your network connection is restricted. Please, open {0} port": "Sınırlı bağlantı. 
Lütfen, {0} nolu portu bilgisayarınıza yönlendirin", + "on your router to make your site accessible for everyone.": "böylece sitenizi herkes için erişilebilir yapabilirsiniz", + "Content publish failed.": "İçerik yayımlama başarısız oldu.", + "This file still in sync, if you write it now, then the previous content may be lost.": "Bu dosya hala güncelleniyor, eğer şimdi kaydederseniz, önceki içerik kaybolabilir.", + "Write content anyway": "Yine de kaydet", + "New certificate added:": "Yeni sertifika eklendi:", + "You current certificate:": "Kullanılan sertifikanız:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "{auth_type}/{auth_user_name}@{domain} olarak değiştir.", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "{auth_type}/{auth_user_name}@{domain} olarak değiştirildi", + "Site cloned": "Site klonlandı", + + "You have successfully changed the web interface's language!": "WEB ara yüzü için dil başarıyla değiştirildi!", + "Due to the browser's caching, the full transformation could take some minute.": "Tam dönüşümün sağlanması, tarayıcı önbelleklemesi yüzünden zaman alabilir.", + + "Connection with UiServer Websocket was lost. Reconnecting...": "UiServer Websocket ile bağlantı kesildi. 
Yeniden bağlanılıyor...", + "Connection with UiServer Websocket recovered.": "UiServer Websocket ile bağlantı yeniden kuruldu.", + "UiServer Websocket error, please reload the page.": "UiServer Websocket hatası, lütfen sayfayı yenileyin.", + "   Connecting...": "   Bağlanıyor...", + "Site size: ": "Site boyutu: ", + "MB is larger than default allowed ": "MB izin verilenden fazla ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "Siteyi açın ve boyut sınırını \" + site_info.next_size_limit + \"MB'ye yükseltin", + " files needs to be downloaded": " indirilmesi gereken dosyalar", + " downloaded": " indirildi", + " download failed": " indirme başarısız", + "Peers found: ": "Bulunan eşler: ", + "No peers found": "Eş bulunamadı", + "Running out of size limit (": "Boyut sınırlamasını aştı (", + "Set limit to \" + site_info.next_size_limit + \"MB": "Sınırlamayı \" + site_info.next_size_limit + \"MB'ye yükselt", + "Site size limit changed to {0}MB": "Site boyut sınırlaması {0}MB olarak ayarlandı", + " New version of this page has just released.
    Reload to see the modified content.": " Bu sayfanın yeni versiyonu yayımlandı.
    Değişen içeriği görmek için yeniden yükleyiniz.", + "This site requests permission:": "Bu site bir izin istiyor:", + "_(Accept)": "İzin ver" + +} diff --git a/src/Translate/languages/zh-tw.json b/src/Translate/languages/zh-tw.json new file mode 100644 index 000000000..0ec071b4f --- /dev/null +++ b/src/Translate/languages/zh-tw.json @@ -0,0 +1,54 @@ +{ + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "祝賀,你的埠 ({0}) 已經打開。
    你已經是 ZeroNet 網路的正式成員了!", + "Tor mode active, every connection using Onion route.": "Tor 模式啟用,每個連接正在使用洋蔥路由。", + "Successfully started Tor onion hidden services.": "成功啟動 Tor 洋蔥隱藏服務。", + "Unable to start hidden services, please check your config.": "無法打開隱藏服務,請檢查你的配置。", + "For faster connections open {0} port on your router.": "為了更快的連接請在路由器上打開 {0} 埠。", + "Your connection is restricted. Please, open {0} port on your router": "你的連接受限制。請在你的路由器上打開 {0} 埠", + "or configure Tor to become a full member of the ZeroNet network.": "或者配置你的 Tor 來成為 ZeroNet 的正式成員。", + + "Select account you want to use in this site:": "選擇你要在這個網站使用的帳戶:", + "currently selected": "當前選擇", + "Unique to site": "網站獨有身份", + + "Content signing failed": "內容簽署失敗", + "Content publish queued for {0:.0f} seconds.": "內容已加入 {0:.0f} 秒後的發佈隊列。", + "Content published to {0}/{1} peers.": "內容已發佈到 {0}/{1} 個節點。", + "Content published to {0} peers.": "內容已發佈到 {0} 個節點。", + "No peers found, but your content is ready to access.": "找不到節點,但是你的內容已經準備好被訪問。", + "Your network connection is restricted. Please, open {0} port": "你的網路連接受限制。請在你的路由器上打開 {0} 埠", + "on your router to make your site accessible for everyone.": "確保你的網站能被每一個人訪問。", + "Content publish failed.": "內容發佈失敗。", + "This file still in sync, if you write it now, then the previous content may be lost.": "這個檔仍然在同步中,如果你現在寫入它,之前的內容可能會被丟失。", + "Write content anyway": "強制寫入內容", + "New certificate added:": "新證書:", + "You current certificate:": "你當前的證書:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "改變至 {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "證書更改至:{auth_type}/{auth_user_name}@{domain}。", + "Site cloned": "網站已克隆", + + "You have successfully changed the web interface's language!": "你已經成功改變了 Web 界面的語言!", + "Due to the browser's caching, the full transformation could take some minute.": "由於你的瀏覽器緩存,完整的翻譯可能需要花幾分鐘。", + + "Connection with UiServer Websocket was lost. 
Reconnecting...": "UiServer Websocket 的連線已丟失。重新連線中...", + "Connection with UiServer Websocket recovered.": "UiServer Websocket 的連線已恢復。", + "UiServer Websocket error, please reload the page.": "UiServer Websocket 錯誤,請重新載入頁面。", + "   Connecting...": "   連線中...", + "Site size: ": "網站大小:", + "MB is larger than default allowed ": "MB 比預設允許的值更大 ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "打開網站並設定大小限制到 \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " 個檔需要下載", + " downloaded": " 已下載", + " download failed": " 下載失敗", + "Peers found: ": "已找到節點:", + "No peers found": "找不到節點", + "Running out of size limit (": "超出大小限制", + "Set limit to \" + site_info.next_size_limit + \"MB": "設定限制到 \" + site_info.next_size_limit + \"MB", + "Cloning site...": "複製網站中...", + "Site cloned": "網站已複製", + "Site size limit changed to {0}MB": "網站大小限制已改變到 {0}MB", + " New version of this page has just released.
    Reload to see the modified content.": " 本頁面的新版本已經發佈。
    重新載入來查看更改後的內容。", + "This site requests permission:": "這個網站的請求許可權:", + "_(Accept)": "授權" + +} diff --git a/src/Translate/languages/zh.json b/src/Translate/languages/zh.json new file mode 100644 index 000000000..16a40b1aa --- /dev/null +++ b/src/Translate/languages/zh.json @@ -0,0 +1,55 @@ +{ + "Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!": "祝贺,您的端口 ({0}) 已经打开。
    您已经是 ZeroNet 网络的正式成员了!", + "Tor mode active, every connection using Onion route.": "Tor 模式启用,每个连接正在使用洋葱路由。", + "Successfully started Tor onion hidden services.": "成功启动 Tor 洋葱隐藏服务。", + "Unable to start hidden services, please check your config.": "无法打开隐藏服务,请检查您的配置。", + "For faster connections open {0} port on your router.": "为了更快的连接请在路由器上打开 {0} 端口。", + "Your connection is restricted. Please, open {0} port on your router": "您的连接受限制。请在您的路由器上打开 {0} 端口", + "or configure Tor to become a full member of the ZeroNet network.": "或者配置您的 Tor 来成为 ZeroNet 的正式成员。", + + "Select account you want to use in this site:": "选择您要在这个网站使用的帐户:", + "No certificate": "没有证书", + "currently selected": "当前选择", + "Unique to site": "网站独有身份", + + "Content signing failed": "内容签名失败", + "Content publish queued for {0:.0f} seconds.": "内容已加入 {0:.0f} 秒后的发布队列。", + "Content published to {0}/{1} peers.": "内容已发布到 {0}/{1} 个节点。", + "Content published to {0} peers.": "内容已发布到 {0} 个节点。", + "No peers found, but your content is ready to access.": "找不到节点,但是您的内容已经准备好被访问。", + "Your network connection is restricted. Please, open {0} port": "您的网络连接受限制。请在您的路由器上打开 {0} 端口", + "on your router to make your site accessible for everyone.": "确保您的站点能被每一个人访问。", + "Content publish failed.": "内容发布失败。", + "This file still in sync, if you write it now, then the previous content may be lost.": "这个文件仍然在同步中,如果您现在写入它,之前的内容可能会被丢失。", + "Write content anyway": "强制写入内容", + "New certificate added:": "新证书:", + "You current certificate:": "您当前的证书:", + "Change it to {auth_type}/{auth_user_name}@{domain}": "更改至 {auth_type}/{auth_user_name}@{domain}", + "Certificate changed to: {auth_type}/{auth_user_name}@{domain}.": "证书更改至:{auth_type}/{auth_user_name}@{domain}。", + "Site cloned": "站点已克隆", + + "You have successfully changed the web interface's language!": "您已经成功更改了 web 界面的语言!", + "Due to the browser's caching, the full transformation could take some minute.": "由于您的浏览器缓存,完整的翻译可能需要花几分钟。", + + "Connection with UiServer Websocket was lost. 
Reconnecting...": "UiServer Websocket 的连接已丢失。重新连接中...", + "Connection with UiServer Websocket recovered.": "UiServer Websocket 的连接已恢复。", + "UiServer Websocket error, please reload the page.": "UiServer Websocket 错误,请重新加载页面。", + "   Connecting...": "   连接中...", + "Site size: ": "站点大小:", + "MB is larger than default allowed ": "MB 比默认允许的值更大 ", + "Open site and set size limit to \" + site_info.next_size_limit + \"MB": "打开站点并设置大小限制到 \" + site_info.next_size_limit + \"MB", + " files needs to be downloaded": " 个文件需要下载", + " downloaded": " 已下载", + " download failed": " 下载失败", + "Peers found: ": "已找到节点:", + "No peers found": "找不到节点", + "Running out of size limit (": "超出大小限制", + "Set limit to \" + site_info.next_size_limit + \"MB": "设置限制到 \" + site_info.next_size_limit + \"MB", + "Cloning site...": "克隆站点中...", + "Site cloned": "站点已克隆", + "Site size limit changed to {0}MB": "站点大小限制已更改到 {0}MB", + " New version of this page has just released.
    Reload to see the modified content.": " 本页面的新版本已经发布。
    重新加载来查看更改后的内容。", + "This site requests permission:": "这个站点的请求权限:", + "_(Accept)": "授权" + +} diff --git a/src/Ui/UiRequest.py b/src/Ui/UiRequest.py index 5e50b61c4..1a2f4b2a8 100644 --- a/src/Ui/UiRequest.py +++ b/src/Ui/UiRequest.py @@ -5,12 +5,15 @@ import json import cgi +import gevent + from Config import config from Site import SiteManager from User import UserManager from Plugin import PluginManager from Ui.UiWebsocket import UiWebsocket from Crypt import CryptHash +from util import helper status_texts = { 200: "200 OK", @@ -22,6 +25,10 @@ } +class SecurityError(Exception): + pass + + @PluginManager.acceptPlugins class UiRequest(object): @@ -39,24 +46,78 @@ def __init__(self, server, get, env, start_response): self.start_response = start_response # Start response function self.user = None + self.script_nonce = None # Nonce for script tags in wrapper html + + def learnHost(self, host): + self.server.allowed_hosts.add(host) + self.server.log.info("Added %s as allowed host" % host) + + def isHostAllowed(self, host): + if host in self.server.allowed_hosts: + return True + + # Allow any IP address as they are not affected by DNS rebinding + # attacks + if helper.isIp(host): + self.learnHost(host) + return True + + if ":" in host and helper.isIp(host.rsplit(":", 1)[0]): # Test without port + self.learnHost(host) + return True + + if self.isProxyRequest(): # Support for chrome extension proxy + if self.server.site_manager.isDomain(host): + return True + else: + return False + + return False # Call the request handler function base on path def route(self, path): - if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict: # Restict Ui access by ip + # Restict Ui access by ip + if config.ui_restrict and self.env['REMOTE_ADDR'] not in config.ui_restrict: return self.error403(details=False) + # Check if host allowed to do request + if not self.isHostAllowed(self.env.get("HTTP_HOST")): + return self.error403("Invalid host: %s" % 
self.env.get("HTTP_HOST"), details=False) + + # Prepend .bit host for transparent proxy + if self.server.site_manager.isDomain(self.env.get("HTTP_HOST")): + path = re.sub("^/", "/" + self.env.get("HTTP_HOST") + "/", path) path = re.sub("^http://zero[/]+", "/", path) # Remove begining http://zero/ for chrome extension path = re.sub("^http://", "/", path) # Remove begining http for chrome extension .bit access + # Sanitize request url + path = path.replace("\\", "/") + if "../" in path or "./" in path: + return self.error403("Invalid path: %s" % path) + if self.env["REQUEST_METHOD"] == "OPTIONS": - content_type = self.getContentType(path) - self.sendHeader(content_type=content_type) + if "/" not in path.strip("/"): + content_type = self.getContentType("index.html") + else: + content_type = self.getContentType(path) + + extra_headers = {"Access-Control-Allow-Origin": "null"} + + self.sendHeader(content_type=content_type, extra_headers=extra_headers, noscript=True) return "" if path == "/": return self.actionIndex() - elif path.endswith("favicon.ico"): + elif path == "/favicon.ico": return self.actionFile("src/Ui/media/img/favicon.ico") + # Internal functions + elif "/ZeroNet-Internal/" in path: + path = re.sub(".*?/ZeroNet-Internal/", "/", path) + func = getattr(self, "action" + path.strip("/"), None) # Check if we have action+request_path function + if func: + return func() + else: + return self.error404(path) # Media elif path.startswith("/uimedia/"): return self.actionUiMedia(path) @@ -64,8 +125,6 @@ def route(self, path): # uimedia within site dir (for chrome extension) path = re.sub(".*?/uimedia/", "/uimedia/", path) return self.actionUiMedia(path) - elif path.startswith("/media"): - return self.actionSiteMedia(path) # Websocket elif path == "/Websocket": return self.actionWebsocket() @@ -74,24 +133,38 @@ def route(self, path): return self.actionDebug() elif path == "/Console" and config.debug: return self.actionConsole() + # Wrapper-less static files + elif 
path.startswith("/raw/"): + return self.actionSiteMedia(path.replace("/raw", "/media", 1), header_noscript=True) + + elif path.startswith("/add/"): + return self.actionSiteAdd() # Site media wrapper else: if self.get.get("wrapper_nonce"): - return self.actionSiteMedia("/media" + path) # Only serve html files with frame + if self.get["wrapper_nonce"] in self.server.wrapper_nonces: + self.server.wrapper_nonces.remove(self.get["wrapper_nonce"]) + return self.actionSiteMedia("/media" + path) # Only serve html files with frame + else: + self.server.log.warning("Invalid wrapper nonce: %s" % self.get["wrapper_nonce"]) + body = self.actionWrapper(path) else: body = self.actionWrapper(path) if body: return body else: - func = getattr(self, "action" + path.lstrip("/"), None) # Check if we have action+request_path function + func = getattr(self, "action" + path.strip("/"), None) # Check if we have action+request_path function if func: return func() else: return self.error404(path) - # The request is proxied by chrome extension + # The request is proxied by chrome extension or a transparent proxy def isProxyRequest(self): - return self.env["PATH_INFO"].startswith("http://") + return self.env["PATH_INFO"].startswith("http://") or (self.server.allow_trans_proxy and self.server.site_manager.isDomain(self.env.get("HTTP_HOST"))) + + def isWebSocketRequest(self): + return self.env.get("HTTP_UPGRADE") == "websocket" def isAjaxRequest(self): return self.env.get("HTTP_X_REQUESTED_WITH") == "XMLHttpRequest" @@ -99,11 +172,19 @@ def isAjaxRequest(self): # Get mime by filename def getContentType(self, file_name): content_type = mimetypes.guess_type(file_name)[0] + + if content_type: + content_type = content_type.lower() + + if file_name.endswith(".css"): # Force correct css content type + content_type = "text/css" + if not content_type: - if file_name.endswith("json"): # Correct json header + if file_name.endswith(".json"): # Correct json header content_type = "application/json" else: 
content_type = "application/octet-stream" + return content_type # Return: Posted variables @@ -132,46 +213,98 @@ def getCurrentUser(self): self.user = UserManager.user_manager.create() return self.user + def getRequestUrl(self): + if self.isProxyRequest(): + if self.env["PATH_INFO"].startswith("http://zero/"): + return self.env["PATH_INFO"] + else: # Add http://zero to direct domain access + return self.env["PATH_INFO"].replace("http://", "http://zero/", 1) + else: + return self.env["wsgi.url_scheme"] + "://" + self.env["HTTP_HOST"] + self.env["PATH_INFO"] + + def getReferer(self): + referer = self.env.get("HTTP_REFERER") + if referer and self.isProxyRequest() and not referer.startswith("http://zero/"): + return referer.replace("http://", "http://zero/", 1) + else: + return referer + + def isScriptNonceSupported(self): + user_agent = self.env.get("HTTP_USER_AGENT") + if "Edge/" in user_agent: + is_script_nonce_supported = False + elif "Safari/" in user_agent and "Chrome/" not in user_agent: + is_script_nonce_supported = False + else: + is_script_nonce_supported = True + return is_script_nonce_supported + # Send response headers - def sendHeader(self, status=200, content_type="text/html", extra_headers=[]): - headers = [] - headers.append(("Version", "HTTP/1.1")) - headers.append(("Connection", "Keep-Alive")) - headers.append(("Keep-Alive", "max=25, timeout=30")) - if content_type != "text/html": - headers.append(("Access-Control-Allow-Origin", "*")) # Allow json access on non-html files - # headers.append(("Content-Security-Policy", "default-src 'self' data: 'unsafe-inline' ws://127.0.0.1:* http://127.0.0.1:* wss://tracker.webtorrent.io; sandbox allow-same-origin allow-top-navigation allow-scripts")) # Only local connections + def sendHeader(self, status=200, content_type="text/html", noscript=False, allow_ajax=False, script_nonce=None, extra_headers=[]): + headers = {} + headers["Version"] = "HTTP/1.1" + headers["Connection"] = "Keep-Alive" + headers["Keep-Alive"] 
= "max=25, timeout=30" + headers["X-Frame-Options"] = "SAMEORIGIN" + is_referer_allowed = False + if self.env.get("HTTP_REFERER"): + if self.isSameOrigin(self.getReferer(), self.getRequestUrl()): + is_referer_allowed = True + elif self.getReferer() == "%s://%s/" % (self.env["wsgi.url_scheme"], self.env["HTTP_HOST"]): # Origin-only referer + is_referer_allowed = True + if content_type != "text/html" and is_referer_allowed: + headers["Access-Control-Allow-Origin"] = "*" # Allow load font files from css + + if noscript: + headers["Content-Security-Policy"] = "default-src 'none'; sandbox allow-top-navigation allow-forms; img-src 'self'; font-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline';" + elif script_nonce and self.isScriptNonceSupported(): + headers["Content-Security-Policy"] = "default-src 'none'; script-src 'nonce-{0}'; img-src 'self'; style-src 'self' 'unsafe-inline'; connect-src *; frame-src 'self'".format(script_nonce) + + if allow_ajax: + headers["Access-Control-Allow-Origin"] = "null" + if self.env["REQUEST_METHOD"] == "OPTIONS": # Allow json access - headers.append(("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept, Cookie")) - headers.append(("Access-Control-Allow-Credentials", "true")) + headers["Access-Control-Allow-Headers"] = "Origin, X-Requested-With, Content-Type, Accept, Cookie, Range" + headers["Access-Control-Allow-Credentials"] = "true" if content_type == "text/html": content_type = "text/html; charset=utf-8" + if content_type == "text/plain": + content_type = "text/plain; charset=utf-8" + + # Download instead of display file types that can be dangerous + if re.findall("/svg|/xml|/x-shockwave-flash|/pdf", content_type): + headers["Content-Disposition"] = "attachment" + cacheable_type = ( content_type == "text/css" or content_type.startswith("image") or content_type.startswith("video") or self.env["REQUEST_METHOD"] == "OPTIONS" or content_type == "application/javascript" ) if status in (200, 206) and 
cacheable_type: # Cache Css, Js, Image files for 10min - headers.append(("Cache-Control", "public, max-age=600")) # Cache 10 min + headers["Cache-Control"] = "public, max-age=600" # Cache 10 min else: - headers.append(("Cache-Control", "no-cache, no-store, private, must-revalidate, max-age=0")) # No caching at all - headers.append(("Content-Type", content_type)) - for extra_header in extra_headers: - headers.append(extra_header) - return self.start_response(status_texts[status], headers) + headers["Cache-Control"] = "no-cache, no-store, private, must-revalidate, max-age=0" # No caching at all + headers["Content-Type"] = content_type + headers.update(extra_headers) + return self.start_response(status_texts[status], headers.items()) # Renders a template def render(self, template_path, *args, **kwargs): - template = open(template_path).read().decode("utf8") - return template.format(**kwargs).encode("utf8") + template = open(template_path).read() + def renderReplacer(m): + return "%s" % kwargs.get(m.group(1), "") + + template_rendered = re.sub("{(.*?)}", renderReplacer, template) + + return template_rendered.encode("utf8") # - Actions - # Redirect to an url def actionRedirect(self, url): - self.start_response('301 Redirect', [('Location', url)]) + self.start_response('301 Redirect', [('Location', str(url))]) yield "Location changed: %s" % url def actionIndex(self): @@ -180,17 +313,36 @@ def actionIndex(self): # Render a file from media with iframe site wrapper def actionWrapper(self, path, extra_headers=None): if not extra_headers: - extra_headers = [] + extra_headers = {} + script_nonce = self.getScriptNonce() match = re.match("/(?P
    [A-Za-z0-9\._-]+)(?P/.*|$)", path) + just_added = False if match: address = match.group("address") inner_path = match.group("inner_path").lstrip("/") - if "." in inner_path and not inner_path.endswith(".html"): - return self.actionSiteMedia("/media" + path) # Only serve html files with frame + + if not inner_path or path.endswith("/"): # It's a directory + content_type = self.getContentType("index.html") + else: # It's a file + content_type = self.getContentType(inner_path) + + is_html_file = "html" in content_type or "xhtml" in content_type + + if not is_html_file: + return self.actionSiteMedia("/media" + path) # Serve non-html files without wrapper + if self.isAjaxRequest(): return self.error403("Ajax request not allowed to load wrapper") # No ajax allowed on wrapper + if self.isWebSocketRequest(): + return self.error403("WebSocket request not allowed to load wrapper") # No websocket + + if "text/html" not in self.env.get("HTTP_ACCEPT", ""): + return self.error403("Invalid Accept header to load wrapper") + if "prefetch" in self.env.get("HTTP_X_MOZ", "") or "prefetch" in self.env.get("HTTP_PURPOSE", ""): + return self.error403("Prefetch not allowed to load wrapper") + site = SiteManager.site_manager.get(address) if ( @@ -200,19 +352,53 @@ def actionWrapper(self, path, extra_headers=None): title = site.content_manager.contents["content.json"]["title"] else: title = "Loading %s..." 
% address - site = SiteManager.site_manager.need(address) # Start download site + site = SiteManager.site_manager.get(address) + if site: # Already added, but not downloaded + if time.time() - site.announcer.time_last_announce > 5: + site.log.debug("Reannouncing site...") + gevent.spawn(site.update, announce=True) + else: # If not added yet + site = SiteManager.site_manager.need(address) + just_added = True if not site: return False - self.sendHeader(extra_headers=extra_headers[:]) - return iter([self.renderWrapper(site, path, inner_path, title, extra_headers)]) - # Dont know why wrapping with iter necessary, but without it around 100x slower + self.sendHeader(extra_headers=extra_headers, script_nonce=script_nonce) + + min_last_announce = (time.time() - site.announcer.time_last_announce) / 60 + if min_last_announce > 60 and site.settings["serving"] and not just_added: + site.log.debug("Site requested, but not announced recently (last %.0fmin ago). Updating..." % min_last_announce) + gevent.spawn(site.update, announce=True) + + return iter([self.renderWrapper(site, path, inner_path, title, extra_headers, script_nonce=script_nonce)]) + # Make response be sent at once (see https://github.com/HelloZeroNet/ZeroNet/issues/1092) else: # Bad url return False - def renderWrapper(self, site, path, inner_path, title, extra_headers): + def getSiteUrl(self, address): + if self.isProxyRequest(): + return "http://zero/" + address + else: + return "/" + address + + def processQueryString(self, site, query_string): + match = re.search("zeronet_peers=(.*?)(&|$)", query_string) + if match: + query_string = query_string.replace(match.group(0), "") + num_added = 0 + for peer in match.group(1).split(","): + if not re.match(".*?:[0-9]+$", peer): + continue + ip, port = peer.rsplit(":", 1) + if site.addPeer(ip, int(port), source="query_string"): + num_added += 1 + site.log.debug("%s peers added by query string" % num_added) + + return query_string + + def renderWrapper(self, site, path, 
inner_path, title, extra_headers, show_loadingscreen=None, script_nonce=None): file_inner_path = inner_path if not file_inner_path: file_inner_path = "index.html" # If inner path defaults to index.html @@ -222,22 +408,34 @@ def renderWrapper(self, site, path, inner_path, title, extra_headers): address = re.sub("/.*", "", path.lstrip("/")) if self.isProxyRequest() and (not path or "/" in path[1:]): - file_url = re.sub(".*/", "", inner_path) + if self.env["HTTP_HOST"] == "zero": + root_url = "/" + address + "/" + file_url = "/" + address + "/" + inner_path + else: + file_url = "/" + inner_path + root_url = "/" + else: file_url = "/" + address + "/" + inner_path + root_url = "/" + address + "/" + + if self.isProxyRequest(): + self.server.allowed_ws_origins.add(self.env["HTTP_HOST"]) # Wrapper variable inits - query_string = "" body_style = "" meta_tags = "" postmessage_nonce_security = "false" wrapper_nonce = self.getWrapperNonce() + inner_query_string = self.processQueryString(site, self.env.get("QUERY_STRING", "")) - if self.env.get("QUERY_STRING"): - query_string = "?%s&wrapper_nonce=%s" % (self.env["QUERY_STRING"], wrapper_nonce) + if inner_query_string: + inner_query_string = "?%s&wrapper_nonce=%s" % (inner_query_string, wrapper_nonce) + elif "?" 
in inner_path: + inner_query_string = "&wrapper_nonce=%s" % wrapper_nonce else: - query_string = "?wrapper_nonce=%s" % wrapper_nonce + inner_query_string = "?wrapper_nonce=%s" % wrapper_nonce if self.isProxyRequest(): # Its a remote proxy request if self.env["REMOTE_ADDR"] == "127.0.0.1": # Local client, the server address also should be 127.0.0.1 @@ -249,20 +447,33 @@ def renderWrapper(self, site, path, inner_path, title, extra_headers): server_url = "" homepage = "/" + config.homepage + user = self.getCurrentUser() + if user: + theme = user.settings.get("theme", "light") + else: + theme = "light" + + themeclass = "theme-%-6s" % re.sub("[^a-z]", "", theme) + if site.content_manager.contents.get("content.json"): # Got content.json content = site.content_manager.contents["content.json"] if content.get("background-color"): - body_style += "background-color: %s;" % \ - cgi.escape(site.content_manager.contents["content.json"]["background-color"], True) + background_color = content.get("background-color-%s" % theme, content["background-color"]) + body_style += "background-color: %s;" % cgi.escape(background_color, True) if content.get("viewport"): meta_tags += '' % cgi.escape(content["viewport"], True) + if content.get("favicon"): + meta_tags += '' % (root_url, cgi.escape(content["favicon"], True)) if content.get("postmessage_nonce_security"): postmessage_nonce_security = "true" - if site.settings.get("own"): - sandbox_permissions = "allow-modals" # For coffeescript compile errors - else: - sandbox_permissions = "" + sandbox_permissions = "" + + if "NOSANDBOX" in site.settings["permissions"]: + sandbox_permissions += " allow-same-origin" + + if show_loadingscreen is None: + show_loadingscreen = not site.storage.isFile(file_inner_path) return self.render( "src/Ui/template/wrapper.html", @@ -274,15 +485,19 @@ def renderWrapper(self, site, path, inner_path, title, extra_headers): title=cgi.escape(title, True), body_style=body_style, meta_tags=meta_tags, - 
query_string=re.escape(query_string), + query_string=re.escape(inner_query_string), wrapper_key=site.settings["wrapper_key"], + ajax_key=site.settings["ajax_key"], wrapper_nonce=wrapper_nonce, postmessage_nonce_security=postmessage_nonce_security, permissions=json.dumps(site.settings["permissions"]), - show_loadingscreen=json.dumps(not site.storage.isFile(file_inner_path)), + show_loadingscreen=json.dumps(show_loadingscreen), sandbox_permissions=sandbox_permissions, rev=config.rev, - homepage=homepage + lang=config.language, + homepage=homepage, + themeclass=themeclass, + script_nonce=script_nonce ) # Create a new wrapper nonce that allows to get one html file without the wrapper @@ -291,82 +506,104 @@ def getWrapperNonce(self): self.server.wrapper_nonces.append(wrapper_nonce) return wrapper_nonce - # Returns if media request allowed from that referer - def isMediaRequestAllowed(self, site_address, referer): - if not re.sub("^http[s]{0,1}://", "", referer).startswith(self.env["HTTP_HOST"]): + def getScriptNonce(self): + if not self.script_nonce: + self.script_nonce = CryptHash.random(encoding="base64") + + return self.script_nonce + + # Create a new wrapper nonce that allows to get one site + def getAddNonce(self): + add_nonce = CryptHash.random() + self.server.add_nonces.append(add_nonce) + return add_nonce + + def isSameOrigin(self, url_a, url_b): + if not url_a or not url_b: return False - referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address - return referer_path.startswith("/" + site_address) + origin_a = re.sub("http[s]{0,1}://(.*?/.*?/).*", "\\1", url_a) + origin_b = re.sub("http[s]{0,1}://(.*?/.*?/).*", "\\1", url_b) + return origin_a == origin_b # Return {address: 1Site.., inner_path: /data/users.json} from url path def parsePath(self, path): + path = path.replace("\\", "/") path = path.replace("/index.html/", "/") # Base Backward compatibility fix if path.endswith("/"): path = path + "index.html" - match = 
re.match("/media/(?P
    [A-Za-z0-9\._-]+)/(?P.*)", path) + if "../" in path or "./" in path: + raise SecurityError("Invalid path") + + match = re.match("/media/(?P
    [A-Za-z0-9]+[A-Za-z0-9\._-]+)(?P/.*|$)", path) if match: path_parts = match.groupdict() path_parts["request_address"] = path_parts["address"] # Original request address (for Merger sites) + path_parts["inner_path"] = path_parts["inner_path"].lstrip("/") + if not path_parts["inner_path"]: + path_parts["inner_path"] = "index.html" return path_parts else: return None - # Serve a media for site - def actionSiteMedia(self, path): - path_parts = self.parsePath(path) + def actionSiteMedia(self, path, header_length=True, header_noscript=False): + try: + path_parts = self.parsePath(path) + except SecurityError as err: + return self.error403(err) + + if not path_parts: + return self.error404(path) - # Check wrapper nonce - content_type = self.getContentType(path) - if "htm" in content_type: # Valid nonce must present to render html files - wrapper_nonce = self.get.get("wrapper_nonce") - if wrapper_nonce not in self.server.wrapper_nonces: - return self.error403("Wrapper nonce error. Please reload the page.") - self.server.wrapper_nonces.remove(self.get["wrapper_nonce"]) + address = path_parts["address"] + file_path = "%s/%s/%s" % (config.data_dir, address, path_parts["inner_path"]) - referer = self.env.get("HTTP_REFERER") - if referer and path_parts: # Only allow same site to receive media - if not self.isMediaRequestAllowed(path_parts["request_address"], referer): - self.log.error("Media referrer error: %s not allowed from %s" % (path_parts["address"], referer)) - return self.error403("Media referrer error") # Referrer not starts same address as requested path - - if path_parts: # Looks like a valid path - address = path_parts["address"] - file_path = "%s/%s/%s" % (config.data_dir, address, path_parts["inner_path"]) - allowed_dir = os.path.abspath("%s/%s" % (config.data_dir, address)) # Only files within data/sitehash allowed - data_dir = os.path.abspath(config.data_dir) # No files from data/ allowed - if ( - ".." 
in file_path or - not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir) or - allowed_dir == data_dir - ): # File not in allowed path - return self.error403() + if config.debug and file_path.split("/")[-1].startswith("all."): + # If debugging merge *.css to all.css and *.js to all.js + site = self.server.sites.get(address) + if site and site.settings["own"]: + from Debug import DebugMedia + DebugMedia.merge(file_path) + + if not address or address == ".": + return self.error403(path_parts["inner_path"]) + + header_allow_ajax = False + if self.get.get("ajax_key"): + site = SiteManager.site_manager.get(path_parts["request_address"]) + if self.get["ajax_key"] == site.settings["ajax_key"]: + header_allow_ajax = True else: - if config.debug and file_path.split("/")[-1].startswith("all."): - # If debugging merge *.css to all.css and *.js to all.js - site = self.server.sites.get(address) - if site.settings["own"]: - from Debug import DebugMedia - DebugMedia.merge(file_path) - if os.path.isfile(file_path): # File exists - return self.actionFile(file_path) - elif os.path.isdir(file_path): # If this is actually a folder, add "/" and redirect - return self.actionRedirect("./{0}/".format(path_parts["inner_path"].split("/")[-1])) - else: # File not exists, try to download - site = SiteManager.site_manager.need(address, all_file=False) - result = site.needFile(path_parts["inner_path"], priority=5) # Wait until file downloads - if result: - return self.actionFile(file_path) - else: - self.log.debug("File not found: %s" % path_parts["inner_path"]) - # Site larger than allowed, re-add wrapper nonce to allow reload - if site.settings.get("size", 0) > site.getSizeLimit() * 1024 * 1024: - self.server.wrapper_nonces.append(self.get.get("wrapper_nonce")) - return self.error404(path_parts["inner_path"]) + return self.error403("Invalid ajax_key") - else: # Bad url - return self.error404(path) + file_size = helper.getFilesize(file_path) + + if file_size is not None: + 
return self.actionFile(file_path, header_length=header_length, header_noscript=header_noscript, header_allow_ajax=header_allow_ajax, file_size=file_size, path_parts=path_parts) + + elif os.path.isdir(file_path): # If this is actually a folder, add "/" and redirect + if path_parts["inner_path"]: + return self.actionRedirect("./%s/" % path_parts["inner_path"].split("/")[-1]) + else: + return self.actionRedirect("./%s/" % path_parts["address"]) + + else: # File not exists, try to download + if address not in SiteManager.site_manager.sites: # Only in case if site already started downloading + return self.actionSiteAddPrompt(path) + + site = SiteManager.site_manager.need(address) + + if path_parts["inner_path"].endswith("favicon.ico"): # Default favicon for all sites + return self.actionFile("src/Ui/media/img/favicon.ico") + + result = site.needFile(path_parts["inner_path"], priority=15) # Wait until file downloads + if result: + file_size = helper.getFilesize(file_path) + return self.actionFile(file_path, header_length=header_length, header_noscript=header_noscript, header_allow_ajax=header_allow_ajax, file_size=file_size, path_parts=path_parts) + else: + self.log.debug("File not found: %s" % path_parts["inner_path"]) + return self.error404(path_parts["inner_path"]) # Serve a media for ui def actionUiMedia(self, path): @@ -386,19 +623,59 @@ def actionUiMedia(self, path): else: # Bad url return self.error400() + def actionSiteAdd(self): + post = dict(cgi.parse_qsl(self.env["wsgi.input"].read())) + if post["add_nonce"] not in self.server.add_nonces: + return self.error403("Add nonce error.") + self.server.add_nonces.remove(post["add_nonce"]) + SiteManager.site_manager.need(post["address"]) + return self.actionRedirect(post["url"]) + + def actionSiteAddPrompt(self, path): + path_parts = self.parsePath(path) + if not path_parts or not self.server.site_manager.isAddress(path_parts["address"]): + return self.error404(path) + + self.sendHeader(200, "text/html", noscript=True) 
+ template = open("src/Ui/template/site_add.html").read() + template = template.replace("{url}", cgi.escape(self.env["PATH_INFO"], True)) + template = template.replace("{address}", path_parts["address"]) + template = template.replace("{add_nonce}", self.getAddNonce()) + return template + + def replaceHtmlVariables(self, block, path_parts): + user = self.getCurrentUser() + themeclass = "theme-%-6s" % re.sub("[^a-z]", "", user.settings.get("theme", "light")) + block = block.replace("{themeclass}", themeclass.encode("utf8")) + + if path_parts: + site = self.server.sites.get(path_parts.get("address")) + if site.settings["own"]: + modified = int(time.time()) + else: + modified = int(site.content_manager.contents["content.json"]["modified"]) + block = block.replace("{site_modified}", str(modified)) + + return block + # Stream a file to client - def actionFile(self, file_path, block_size=64 * 1024, send_header=True, header_length=True): - if os.path.isfile(file_path): + def actionFile(self, file_path, block_size=64 * 1024, send_header=True, header_length=True, header_noscript=False, header_allow_ajax=False, file_size=None, file_obj=None, path_parts=None): + if file_size is None: + file_size = helper.getFilesize(file_path) + + if file_size is not None: # Try to figure out content type by extension content_type = self.getContentType(file_path) - # TODO: Dont allow external access: extra_headers= - # [("Content-Security-Policy", "default-src 'unsafe-inline' data: http://localhost:43110 ws://localhost:43110")] range = self.env.get("HTTP_RANGE") range_start = None + + is_html_file = file_path.endswith(".html") + if is_html_file: + header_length = False + if send_header: extra_headers = {} - file_size = os.path.getsize(file_path) extra_headers["Accept-Ranges"] = "bytes" if header_length: extra_headers["Content-Length"] = str(file_size) @@ -414,20 +691,24 @@ def actionFile(self, file_path, block_size=64 * 1024, send_header=True, header_l status = 206 else: status = 200 - 
self.sendHeader(status, content_type=content_type, extra_headers=extra_headers.items()) + self.sendHeader(status, content_type=content_type, noscript=header_noscript, allow_ajax=header_allow_ajax, extra_headers=extra_headers) if self.env["REQUEST_METHOD"] != "OPTIONS": - file = open(file_path, "rb") + if not file_obj: + file_obj = open(file_path, "rb") + if range_start: - file.seek(range_start) + file_obj.seek(range_start) while 1: try: - block = file.read(block_size) + block = file_obj.read(block_size) + if is_html_file: + block = self.replaceHtmlVariables(block, path_parts) if block: yield block else: raise StopIteration except StopIteration: - file.close() + file_obj.close() break else: # File not exists yield self.error404(file_path) @@ -435,22 +716,39 @@ def actionFile(self, file_path, block_size=64 * 1024, send_header=True, header_l # On websocket connection def actionWebsocket(self): ws = self.env.get("wsgi.websocket") + if ws: - wrapper_key = self.get["wrapper_key"] + # Allow only same-origin websocket requests + origin = self.env.get("HTTP_ORIGIN") + host = self.env.get("HTTP_HOST") + # Allow only same-origin websocket requests + if origin: + origin_host = origin.split("://", 1)[-1] + if origin_host != host and origin_host not in self.server.allowed_ws_origins: + ws.send(json.dumps({"error": "Invalid origin: %s" % origin})) + return self.error403("Invalid origin: %s" % origin) + # Find site by wrapper_key + wrapper_key = self.get["wrapper_key"] site = None for site_check in self.server.sites.values(): if site_check.settings["wrapper_key"] == wrapper_key: site = site_check if site: # Correct wrapper key - user = self.getCurrentUser() + try: + user = self.getCurrentUser() + except Exception, err: + self.log.error("Error in data/user.json: %s" % err) + return self.error500() if not user: self.log.error("No user found") return self.error403() ui_websocket = UiWebsocket(ws, site, self.server, user, self) site.websockets.append(ui_websocket) # Add to site 
websockets to allow notify on events + self.server.websockets.append(ui_websocket) ui_websocket.start() + self.server.websockets.remove(ui_websocket) for site_check in self.server.sites.values(): # Remove websocket from every site (admin sites allowed to join other sites event channels) if ui_websocket in site_check.websockets: @@ -479,6 +777,16 @@ def actionConsole(self): import sys sites = self.server.sites main = sys.modules["main"] + + def bench(code, times=100, init=None): + sites = self.server.sites + main = sys.modules["main"] + s = time.time() + if init: + eval(compile(init, '', 'exec'), globals(), locals()) + for _ in range(times): + back = eval(code, globals(), locals()) + return ["%s run: %.3fs" % (times, time.time() - s), back] raise Exception("Here is your console") # - Tests - @@ -496,30 +804,30 @@ def actionTestStream(self): # Send bad request error def error400(self, message=""): - self.sendHeader(400) + self.sendHeader(400, noscript=True) return self.formatError("Bad Request", message) # You are not allowed to access this def error403(self, message="", details=True): - self.sendHeader(403) - self.log.debug("Error 403: %s" % message) + self.sendHeader(403, noscript=True) + self.log.error("Error 403: %s" % message) return self.formatError("Forbidden", message, details=details) # Send file not found error def error404(self, path=""): - self.sendHeader(404) - return self.formatError("Not Found", cgi.escape(path.encode("utf8")), details=False) + self.sendHeader(404, noscript=True) + return self.formatError("Not Found", path.encode("utf8"), details=False) # Internal server error def error500(self, message=":("): - self.sendHeader(500) - return self.formatError("Server error", cgi.escape(message)) + self.sendHeader(500, noscript=True) + return self.formatError("Server error", message) def formatError(self, title, message, details=True): import sys import gevent - if details: + if details and config.debug: details = {key: val for key, val in 
self.env.items() if hasattr(val, "endswith") and "COOKIE" not in key} details["version_zeronet"] = "%s r%s" % (config.version, config.rev) details["version_python"] = sys.version @@ -534,22 +842,12 @@ def formatError(self, title, message, details=True):

    %s

    %s

    -

    Please report it if you think this an error.

    +

    Please report it if you think this an error.

    Details:

    %s
    - """ % (title, message, json.dumps(details, indent=4, sort_keys=True)) + """ % (title, cgi.escape(message), cgi.escape(json.dumps(details, indent=4, sort_keys=True))) else: return """

    %s

    %s

    - """ % (title, message) - - -# - Reload for eaiser developing - -# def reload(): - # import imp, sys - # global UiWebsocket - # UiWebsocket = imp.load_source("UiWebsocket", "src/Ui/UiWebsocket.py").UiWebsocket - # reload(sys.modules["User.UserManager"]) - # UserManager.reloadModule() - # self.user = UserManager.user_manager.getCurrent() + """ % (title, cgi.escape(message)) diff --git a/src/Ui/UiServer.py b/src/Ui/UiServer.py index 745ec512d..03d56162c 100644 --- a/src/Ui/UiServer.py +++ b/src/Ui/UiServer.py @@ -3,6 +3,7 @@ import cgi import socket import sys +import gevent from gevent.pywsgi import WSGIServer from gevent.pywsgi import WSGIHandler @@ -56,8 +57,30 @@ def __init__(self): self.ip = config.ui_ip self.port = config.ui_port if self.ip == "*": - self.ip = "" # Bind all + self.ip = "0.0.0.0" # Bind all + if config.ui_host: + self.allowed_hosts = set(config.ui_host) + elif config.ui_ip == "127.0.0.1": + # IP Addresses are inherently allowed as they are immune to DNS + # rebinding attacks. + self.allowed_hosts = set(["zero", "localhost:%s" % config.ui_port]) + # "URI producers and normalizers should omit the port component and + # its ':' delimiter if port is empty or if its value would be the + # same as that of the scheme's default." + # Source: https://tools.ietf.org/html/rfc3986#section-3.2.3 + # As a result, we need to support portless hosts if port 80 is in + # use. 
+ if config.ui_port == 80: + self.allowed_hosts.update(["localhost"]) + else: + self.allowed_hosts = set([]) + self.allow_trans_proxy = config.ui_trans_proxy + self.allowed_ws_origins = set() + self.wrapper_nonces = [] + self.add_nonces = [] + self.websockets = [] + self.site_manager = SiteManager.site_manager self.sites = SiteManager.site_manager.list() self.log = logging.getLogger(__name__) @@ -114,16 +137,20 @@ def start(self): self.log.info("Web interface: http://%s:%s/" % (config.ui_ip, config.ui_port)) self.log.info("--------------------------------------") - if config.open_browser: + if config.open_browser and config.open_browser != "False": logging.info("Opening browser: %s...", config.open_browser) import webbrowser - if config.open_browser == "default_browser": - browser = webbrowser.get() - else: - browser = webbrowser.get(config.open_browser) - browser.open("http://%s:%s/%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage), new=2) - - self.server = WSGIServer((self.ip.replace("*", ""), self.port), handler, handler_class=UiWSGIHandler, log=self.log) + try: + if config.open_browser == "default_browser": + browser = webbrowser.get() + else: + browser = webbrowser.get(config.open_browser) + url = "http://%s:%s/%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage) + gevent.spawn_later(0.3, browser.open, url, new=2) + except Exception as err: + print "Error starting browser: %s" % err + + self.server = WSGIServer((self.ip, self.port), handler, handler_class=UiWSGIHandler, log=self.log) self.server.sockets = {} self.afterStarted() try: @@ -155,4 +182,8 @@ def stop(self): self.server.socket.close() self.server.stop() - time.sleep(1) \ No newline at end of file + time.sleep(1) + + def updateWebsocket(self, **kwargs): + for ws in self.websockets: + ws.event("serverChanged", kwargs) diff --git a/src/Ui/UiWebsocket.py b/src/Ui/UiWebsocket.py index 14232ee8e..580ff2af2 100644 --- 
a/src/Ui/UiWebsocket.py +++ b/src/Ui/UiWebsocket.py @@ -1,10 +1,11 @@ import json import time import sys -import hashlib import os import shutil import re +import copy +import logging import gevent @@ -13,10 +14,20 @@ from Debug import Debug from util import QueryJson, RateLimit from Plugin import PluginManager +from Translate import translate as _ +from util import helper +from util import SafeRe +from Content.ContentManager import VerifyError, SignError @PluginManager.acceptPlugins class UiWebsocket(object): + admin_commands = set([ + "sitePause", "siteResume", "siteDelete", "siteList", "siteSetLimit", "siteAdd", "siteListModifiedFiles", "siteSetSettingsValue", + "channelJoinAllsite", "serverUpdate", "serverPortcheck", "serverShutdown", "serverShowdirectory", "serverGetWrapperNonce", + "certSet", "certList", "configSet", "permissionAdd", "permissionRemove", "announcerStats", "userSetGlobalSettings" + ]) + async_commands = set(["fileGet", "fileList", "dirList", "fileNeed", "serverPortcheck", "siteListModifiedFiles"]) def __init__(self, ws, site, server, user, request): self.ws = ws @@ -29,7 +40,7 @@ def __init__(self, ws, site, server, user, request): self.next_message_id = 1 self.waiting_cb = {} # Waiting for callback. 
Key: message_id, Value: function pointer self.channels = [] # Channels joined to - self.sending = False # Currently sending to client + self.state = {"sending": False} # Shared state of websocket connection self.send_queue = [] # Messages to send to client # Start listener loop @@ -39,85 +50,145 @@ def start(self): # Add open fileserver port message or closed port error to homepage at first request after start self.site.page_requested = True # Dont add connection notification anymore file_server = sys.modules["main"].file_server - if file_server.port_opened is None or file_server.tor_manager.start_onions is None: + if not file_server.port_opened or file_server.tor_manager.start_onions is None: self.site.page_requested = False # Not ready yet, check next time - elif file_server.port_opened is True: - self.site.notifications.append([ - "done", - "Congratulation, your port %s is opened.
    You are full member of ZeroNet network!" % - config.fileserver_port, - 10000 - ]) - elif config.tor == "always" and file_server.tor_manager.start_onions: - self.site.notifications.append([ - "done", - """ - Tor mode active, every connection using Onion route.
    - Successfully started Tor onion hidden services. - """, - 10000 - ]) - elif config.tor == "always" and file_server.tor_manager.start_onions is not False: - self.site.notifications.append([ - "error", - """ - Tor mode active, every connection using Onion route.
    - Unable to start hidden services, please check your config. - """, - 0 - ]) - elif file_server.port_opened is False and file_server.tor_manager.start_onions: - self.site.notifications.append([ - "done", - """ - Successfully started Tor onion hidden services.
    - For faster connections open %s port on your router. - """ % config.fileserver_port, - 10000 - ]) else: - self.site.notifications.append([ - "error", - """ - Your connection is restricted. Please, open %s port on your router
    - or configure Tor to become full member of ZeroNet network. - """ % config.fileserver_port, - 0 - ]) + try: + self.addHomepageNotifications() + except Exception, err: + self.log.error("Uncaught Exception: " + Debug.formatException(err)) for notification in self.site.notifications: # Send pending notification messages + # send via WebSocket self.cmd("notification", notification) + # just in case, log them to terminal + if notification[0] == "error": + self.log.error("\n*** %s\n" % self.dedent(notification[1])) + self.site.notifications = [] + while True: try: - message = ws.receive() + if ws.closed: + break + else: + message = ws.receive() except Exception, err: - return "Bye." # Close connection + self.log.error("WebSocket receive error: %s" % Debug.formatException(err)) + break if message: try: - self.handleRequest(message) + req = json.loads(message) + self.handleRequest(req) except Exception, err: if config.debug: # Allow websocket errors to appear on /Debug sys.modules["main"].DebugHook.handleError() - self.log.error("WebSocket handleRequest error: %s" % Debug.formatException(err)) - self.cmd("error", "Internal error: %s" % Debug.formatException(err, "html")) + self.log.error("WebSocket handleRequest error: %s \n %s" % (Debug.formatException(err), message)) + if not self.hasPlugin("Multiuser"): + self.cmd("error", "Internal error: %s" % Debug.formatException(err, "html")) + + def dedent(self, text): + return re.sub("[\\r\\n\\x20\\t]+", " ", text.strip().replace("
    ", " ")) + + def addHomepageNotifications(self): + if not(self.hasPlugin("Multiuser")) and not(self.hasPlugin("UiPassword")): + bind_ip = getattr(config, "ui_ip", "") + whitelist = getattr(config, "ui_restrict", []) + # binds to the Internet, no IP whitelist, no UiPassword, no Multiuser + if ("0.0.0.0" == bind_ip or "*" == bind_ip) and (not whitelist): + self.site.notifications.append([ + "error", + _(u"You are not going to set up a public gateway. However, your Web UI is
    " + + "open to the whole Internet.
    " + + "Please check your configuration.") + ]) - def hasSitePermission(self, address): + file_server = sys.modules["main"].file_server + if any(file_server.port_opened.values()): + self.site.notifications.append([ + "done", + _["Congratulations, your port {0} is opened.
    You are a full member of the ZeroNet network!"].format(config.fileserver_port), + 10000 + ]) + elif config.tor == "always" and file_server.tor_manager.start_onions: + self.site.notifications.append([ + "done", + _(u""" + {_[Tor mode active, every connection using Onion route.]}
    + {_[Successfully started Tor onion hidden services.]} + """), + 10000 + ]) + elif config.tor == "always" and file_server.tor_manager.start_onions is not False: + self.site.notifications.append([ + "error", + _(u""" + {_[Tor mode active, every connection using Onion route.]}
    + {_[Unable to start hidden services, please check your config.]} + """), + 0 + ]) + elif file_server.tor_manager.start_onions: + self.site.notifications.append([ + "done", + _(u""" + {_[Successfully started Tor onion hidden services.]}
    + {_[For faster connections open {0} port on your router.]} + """).format(config.fileserver_port), + 10000 + ]) + else: + self.site.notifications.append([ + "error", + _(u""" + {_[Your connection is restricted. Please, open {0} port on your router]}
    + {_[or configure Tor to become a full member of the ZeroNet network.]} + """).format(config.fileserver_port), + 0 + ]) + + def hasPlugin(self, name): + return name in PluginManager.plugin_manager.plugin_names + + # Has permission to run the command + def hasCmdPermission(self, cmd): + cmd = cmd[0].lower() + cmd[1:] + + if cmd in self.admin_commands and "ADMIN" not in self.permissions: + return False + else: + return True + + # Has permission to access a site + def hasSitePermission(self, address, cmd=None): if address != self.site.address and "ADMIN" not in self.site.settings["permissions"]: return False else: return True + def hasFilePermission(self, inner_path): + valid_signers = self.site.content_manager.getValidSigners(inner_path) + return self.site.settings["own"] or self.user.getAuthAddress(self.site.address) in valid_signers + # Event in a channel def event(self, channel, *params): if channel in self.channels: # We are joined to channel if channel == "siteChanged": - site = params[0] # Triggerer site - site_info = self.formatSiteInfo(site) + site = params[0] + site_info = self.formatSiteInfo(site, create_user=False) if len(params) > 1 and params[1]: # Extra data site_info.update(params[1]) self.cmd("setSiteInfo", site_info) + elif channel == "serverChanged": + server_info = self.formatServerInfo() + self.cmd("setServerInfo", server_info) + elif channel == "announcerChanged": + site = params[0] + announcer_info = self.formatAnnouncerInfo(site) + if len(params) > 1 and params[1]: # Extra data + announcer_info.update(params[1]) + self.cmd("setAnnouncerInfo", announcer_info) # Send response to client (to = message.id) def response(self, to, result): @@ -133,17 +204,18 @@ def send(self, message, cb=None): self.next_message_id += 1 if cb: # Callback after client responded self.waiting_cb[message["id"]] = cb - if self.sending: - return # Already sending self.send_queue.append(message) + if self.state["sending"]: + return # Already sending try: while 
self.send_queue: - self.sending = True + self.state["sending"] = True message = self.send_queue.pop(0) self.ws.send(json.dumps(message)) - self.sending = False + self.state["sending"] = False except Exception, err: self.log.debug("Websocket send error: %s" % Debug.formatException(err)) + self.state["sending"] = False def getPermissions(self, req_id): permissions = self.site.settings["permissions"] @@ -152,24 +224,33 @@ def getPermissions(self, req_id): permissions.append("ADMIN") return permissions + def asyncWrapper(self, func): + def asyncErrorWatcher(func, *args, **kwargs): + try: + result = func(*args, **kwargs) + if result is not None: + self.response(args[0], result) + except Exception, err: + if config.debug: # Allow websocket errors to appear on /Debug + sys.modules["main"].DebugHook.handleError() + self.log.error("WebSocket handleRequest error: %s" % Debug.formatException(err)) + self.cmd("error", "Internal error: %s" % Debug.formatException(err, "html")) + + def wrapper(*args, **kwargs): + gevent.spawn(asyncErrorWatcher, func, *args, **kwargs) + return wrapper + # Handle incoming messages - def handleRequest(self, data): - req = json.loads(data) + def handleRequest(self, req): cmd = req.get("cmd") params = req.get("params") self.permissions = self.getPermissions(req["id"]) - admin_commands = ( - "sitePause", "siteResume", "siteDelete", "siteList", "siteSetLimit", "siteClone", - "channelJoinAllsite", "serverUpdate", "serverPortcheck", "serverShutdown", "certSet", "configSet", - "actionPermissionAdd", "actionPermissionRemove" - ) - if cmd == "response": # It's a response to a command return self.actionResponse(req["to"], req["result"]) - elif cmd in admin_commands and "ADMIN" not in self.permissions: # Admin commands - return self.response(req["id"], {"error:", "You don't have permission to run %s" % cmd}) + elif not self.hasCmdPermission(cmd): # Admin commands + return self.response(req["id"], {"error": "You don't have permission to run %s" % cmd}) else: # 
Normal command func_name = "action" + cmd[0].upper() + cmd[1:] func = getattr(self, func_name, None) @@ -177,19 +258,26 @@ def handleRequest(self, data): self.response(req["id"], {"error": "Unknown command: %s" % cmd}) return + # Execute in parallel + if cmd in self.async_commands: + func = self.asyncWrapper(func) + # Support calling as named, unnamed parameters and raw first argument too if type(params) is dict: - func(req["id"], **params) + result = func(req["id"], **params) elif type(params) is list: - func(req["id"], *params) + result = func(req["id"], *params) elif params: - func(req["id"], params) + result = func(req["id"], params) else: - func(req["id"]) + result = func(req["id"]) + + if result is not None: + self.response(req["id"], result) # Format site info def formatSiteInfo(self, site, create_user=True): - content = site.content_manager.contents.get("content.json") + content = site.content_manager.contents.get("content.json", {}) if content: # Remove unnecessary data transfer content = content.copy() content["files"] = len(content.get("files", {})) @@ -208,7 +296,6 @@ def formatSiteInfo(self, site, create_user=True): ret = { "auth_key": self.site.settings["auth_key"], # Obsolete, will be removed - "auth_key_sha512": hashlib.sha512(self.site.settings["auth_key"]).hexdigest()[0:64], # Obsolete, will be removed "auth_address": self.user.getAuthAddress(site.address, create=create_user), "cert_user_id": self.user.getCertUserId(site.address), "address": site.address, @@ -230,23 +317,47 @@ def formatSiteInfo(self, site, create_user=True): return ret def formatServerInfo(self): + file_server = sys.modules["main"].file_server + if file_server.port_opened == {}: + ip_external = None + else: + ip_external = any(file_server.port_opened.values()) return { - "ip_external": sys.modules["main"].file_server.port_opened, + "ip_external": ip_external, + "port_opened": file_server.port_opened, "platform": sys.platform, "fileserver_ip": config.fileserver_ip, 
"fileserver_port": config.fileserver_port, - "tor_enabled": sys.modules["main"].file_server.tor_manager.enabled, - "tor_status": sys.modules["main"].file_server.tor_manager.status, + "tor_enabled": file_server.tor_manager.enabled, + "tor_status": file_server.tor_manager.status, + "tor_has_meek_bridges": file_server.tor_manager.has_meek_bridges, + "tor_use_bridges": config.tor_use_bridges, "ui_ip": config.ui_ip, "ui_port": config.ui_port, "version": config.version, "rev": config.rev, + "timecorrection": file_server.timecorrection, + "language": config.language, "debug": config.debug, - "plugins": PluginManager.plugin_manager.plugin_names + "plugins": PluginManager.plugin_manager.plugin_names, + "user_settings": self.user.settings } + def formatAnnouncerInfo(self, site): + return {"address": site.address, "stats": site.announcer.stats} + # - Actions - + def actionAs(self, to, address, cmd, params=[]): + if not self.hasSitePermission(address, cmd=cmd): + return self.response(to, "No permission for site %s" % address) + req_self = copy.copy(self) + req_self.site = self.server.sites.get(address) + req_self.hasCmdPermission = self.hasCmdPermission # Use the same permissions as current site + req_obj = super(UiWebsocket, req_self) + req = {"id": to, "cmd": cmd, "params": params} + req_obj.handleRequest(req) + # Do callback on response {"cmd": "response", "to": message_id, "result": result} def actionResponse(self, to, result): if to in self.waiting_cb: @@ -267,17 +378,48 @@ def actionSiteInfo(self, to, file_status=None): self.response(to, ret) # Join to an event channel - def actionChannelJoin(self, to, channel): - if channel not in self.channels: - self.channels.append(channel) + def actionChannelJoin(self, to, channels): + if type(channels) != list: + channels = [channels] + + for channel in channels: + if channel not in self.channels: + self.channels.append(channel) # Server variables def actionServerInfo(self, to): - ret = self.formatServerInfo() - self.response(to, 
ret) + back = self.formatServerInfo() + self.response(to, back) + + # Create a new wrapper nonce that allows to load html file + def actionServerGetWrapperNonce(self, to): + wrapper_nonce = self.request.getWrapperNonce() + self.response(to, wrapper_nonce) + + def actionAnnouncerInfo(self, to): + back = self.formatAnnouncerInfo(self.site) + self.response(to, back) + + def actionAnnouncerStats(self, to): + back = {} + trackers = self.site.announcer.getTrackers() + for site in self.server.sites.values(): + for tracker, stats in site.announcer.stats.iteritems(): + if tracker not in trackers: + continue + if tracker not in back: + back[tracker] = {} + is_latest_data = bool(stats["time_request"] > back[tracker].get("time_request", 0) and stats["status"]) + for key, val in stats.iteritems(): + if key.startswith("num_"): + back[tracker][key] = back[tracker].get(key, 0) + val + elif is_latest_data: + back[tracker][key] = val + + return back # Sign content.json - def actionSiteSign(self, to, privatekey=None, inner_path="content.json", response_ok=True, update_changed_files=False): + def actionSiteSign(self, to, privatekey=None, inner_path="content.json", remove_missing_optional=False, update_changed_files=False, response_ok=True): self.log.debug("Signing: %s" % inner_path) site = self.site extend = {} # Extended info for signing @@ -290,17 +432,18 @@ def actionSiteSign(self, to, privatekey=None, inner_path="content.json", respons inner_path = file_info["content_inner_path"] # Add certificate to user files - if file_info and "cert_signers" in file_info and privatekey is None: + is_user_content = file_info and ("cert_signers" in file_info or "cert_signers_pattern" in file_info) + if is_user_content and privatekey is None: cert = self.user.getCert(self.site.address) extend["cert_auth_type"] = cert["auth_type"] extend["cert_user_id"] = self.user.getCertUserId(site.address) extend["cert_sign"] = cert["cert_sign"] + self.log.debug("Extending content.json with cert %s" % 
extend["cert_user_id"]) - if ( - not site.settings["own"] and - self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path) - ): + if not self.hasFilePermission(inner_path): + self.log.error("SiteSign error: you don't own this site & site owner doesn't allow you to do so.") return self.response(to, {"error": "Forbidden, you can only modify your own sites"}) + if privatekey == "stored": # Get privatekey from sites.json privatekey = self.user.getSiteData(self.site.address).get("privatekey") if not privatekey: # Get privatekey from users.json auth_address @@ -310,10 +453,17 @@ def actionSiteSign(self, to, privatekey=None, inner_path="content.json", respons # Reload content.json, ignore errors to make it up-to-date site.content_manager.loadContent(inner_path, add_bad_files=False, force=True) # Sign using private key sent by user - signed = site.content_manager.sign(inner_path, privatekey, extend=extend, update_changed_files=update_changed_files) - if not signed: - self.cmd("notification", ["error", "Content sign failed: invalid private key."]) - self.response(to, {"error": "Site sign failed"}) + try: + site.content_manager.sign(inner_path, privatekey, extend=extend, update_changed_files=update_changed_files, remove_missing_optional=remove_missing_optional) + except (VerifyError, SignError) as err: + self.cmd("notification", ["error", _["Content signing failed"] + "
    %s" % err]) + self.response(to, {"error": "Site sign failed: %s" % err}) + self.log.error("Site sign failed: %s: %s" % (inner_path, Debug.formatException(err))) + return + except Exception as err: + self.cmd("notification", ["error", _["Content signing error"] + "
    %s" % Debug.formatException(err)]) + self.response(to, {"error": "Site sign error: %s" % Debug.formatException(err)}) + self.log.error("Site sign error: %s: %s" % (inner_path, Debug.formatException(err))) return site.content_manager.loadContent(inner_path, add_bad_files=False) # Load new content.json, ignore errors @@ -323,13 +473,16 @@ def actionSiteSign(self, to, privatekey=None, inner_path="content.json", respons if response_ok: self.response(to, "ok") - - return inner_path + else: + return inner_path # Sign and publish content.json - def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True): + def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign=True, remove_missing_optional=False, update_changed_files=False): if sign: - inner_path = self.actionSiteSign(to, privatekey, inner_path, response_ok=False) + inner_path = self.actionSiteSign( + to, privatekey, inner_path, response_ok=False, + remove_missing_optional=remove_missing_optional, update_changed_files=update_changed_files + ) if not inner_path: return # Publishing @@ -338,6 +491,9 @@ def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign self.site.saveSettings() self.site.announce() + if inner_path not in self.site.content_manager.contents: + return self.response(to, {"error": "File %s not found" % inner_path}) + event_name = "publish %s %s" % (self.site.address, inner_path) called_instantly = RateLimit.isAllowed(event_name, 30) thread = RateLimit.callAsync(event_name, 30, self.doSitePublish, self.site, inner_path) # Only publish once in 30 seconds @@ -345,57 +501,75 @@ def actionSitePublish(self, to, privatekey=None, inner_path="content.json", sign thread.linked = True if called_instantly: # Allowed to call instantly # At the end callback with request id and thread + self.cmd("progress", ["publish", _["Content published to {0}/{1} peers."].format(0, 5), 0]) thread.link(lambda thread: self.cbSitePublish(to, self.site, 
thread, notification, callback=notification)) else: self.cmd( "notification", - ["info", "Content publish queued for %.0f seconds." % RateLimit.delayLeft(event_name, 30), 5000] + ["info", _["Content publish queued for {0:.0f} seconds."].format(RateLimit.delayLeft(event_name, 30)), 5000] ) self.response(to, "ok") # At the end display notification thread.link(lambda thread: self.cbSitePublish(to, self.site, thread, notification, callback=False)) def doSitePublish(self, site, inner_path): + def cbProgress(published, limit): + progress = int(float(published) / limit * 100) + self.cmd("progress", [ + "publish", + _["Content published to {0}/{1} peers."].format(published, limit), + progress + ]) diffs = site.content_manager.getDiffs(inner_path) - return site.publish(limit=5, inner_path=inner_path, diffs=diffs) + back = site.publish(limit=5, inner_path=inner_path, diffs=diffs, cb_progress=cbProgress) + if back == 0: # Failed to publish to anyone + self.cmd("progress", ["publish", _["Content publish failed."], -100]) + else: + cbProgress(back, back) + return back # Callback of site publish def cbSitePublish(self, to, site, thread, notification=True, callback=True): published = thread.value if published > 0: # Successfully published if notification: - self.cmd("notification", ["done", "Content published to %s peers." 
% published, 5000]) + # self.cmd("notification", ["done", _["Content published to {0} peers."].format(published), 5000]) site.updateWebsocket() # Send updated site data to local websocket clients if callback: self.response(to, "ok") else: if len(site.peers) == 0: - if sys.modules["main"].file_server.port_opened or sys.modules["main"].file_server.tor_manager.start_onions: + if any(sys.modules["main"].file_server.port_opened.values()) or sys.modules["main"].file_server.tor_manager.start_onions: if notification: - self.cmd("notification", ["info", "No peers found, but your content is ready to access.", 5000]) + self.cmd("notification", ["info", _["No peers found, but your content is ready to access."]]) if callback: self.response(to, "ok") else: if notification: self.cmd("notification", [ "info", - """Your network connection is restricted. Please, open %s port
    - on your router to make your site accessible for everyone.""" % config.fileserver_port + _(u"""{_[Your network connection is restricted. Please, open {0} port]}
    + {_[on your router to make your site accessible for everyone.]}""").format(config.fileserver_port) ]) if callback: self.response(to, {"error": "Port not opened."}) else: if notification: - self.cmd("notification", ["error", "Content publish failed."]) self.response(to, {"error": "Content publish failed."}) + def actionSiteReload(self, to, inner_path): + self.site.content_manager.loadContent(inner_path, add_bad_files=False) + self.site.storage.verifyFiles(quick_check=True) + self.site.updateWebsocket() + return "ok" + # Write a file to disk def actionFileWrite(self, to, inner_path, content_base64, ignore_bad_files=False): valid_signers = self.site.content_manager.getValidSigners(inner_path) auth_address = self.user.getAuthAddress(self.site.address) - if not self.site.settings["own"] and auth_address not in valid_signers: - self.log.debug("FileWrite forbidden %s not in %s" % (auth_address, valid_signers)) + if not self.hasFilePermission(inner_path): + self.log.error("FileWrite forbidden %s not in valid_signers %s" % (auth_address, valid_signers)) return self.response(to, {"error": "Forbidden, you can only modify your own files"}) # Try not to overwrite files currently in sync @@ -405,7 +579,7 @@ def actionFileWrite(self, to, inner_path, content_base64, ignore_bad_files=False if not found: self.cmd( "confirm", - ["This file still in sync, if you write it now, then the previous content may be lost.", "Write content anyway"], + [_["This file still in sync, if you write it now, then the previous content may be lost."], _["Write content anyway"]], lambda (res): self.actionFileWrite(to, inner_path, content_base64, ignore_bad_files=True) ) return False @@ -428,6 +602,7 @@ def actionFileWrite(self, to, inner_path, content_base64, ignore_bad_files=False self.site.storage.write(inner_path, content) except Exception, err: + self.log.error("File write error: %s" % Debug.formatException(err)) return self.response(to, {"error": "Write error: %s" % Debug.formatException(err)}) 
if inner_path.endswith("content.json"): @@ -441,16 +616,29 @@ def actionFileWrite(self, to, inner_path, content_base64, ignore_bad_files=False ws.event("siteChanged", self.site, {"event": ["file_done", inner_path]}) def actionFileDelete(self, to, inner_path): - if ( - not self.site.settings["own"] and - self.user.getAuthAddress(self.site.address) not in self.site.content_manager.getValidSigners(inner_path) - ): + if not self.hasFilePermission(inner_path): + self.log.error("File delete error: you don't own this site & you are not approved by the owner.") return self.response(to, {"error": "Forbidden, you can only modify your own files"}) - try: - self.site.storage.delete(inner_path) - except Exception, err: - return self.response(to, {"error": "Delete error: %s" % err}) + need_delete = True + file_info = self.site.content_manager.getFileInfo(inner_path) + if file_info and file_info.get("optional"): + # Non-existing optional files won't be removed from content.json, so we have to do it manually + self.log.debug("Deleting optional file: %s" % inner_path) + relative_path = file_info["relative_path"] + content_json = self.site.storage.loadJson(file_info["content_inner_path"]) + if relative_path in content_json.get("files_optional", {}): + del content_json["files_optional"][relative_path] + self.site.storage.writeJson(file_info["content_inner_path"], content_json) + self.site.content_manager.loadContent(file_info["content_inner_path"], add_bad_files=False, force=True) + need_delete = self.site.storage.isFile(inner_path) # File sill exists after removing from content.json (owned site) + + if need_delete: + try: + self.site.storage.delete(inner_path) + except Exception, err: + self.log.error("File delete error: %s" % err) + return self.response(to, {"error": "Delete error: %s" % err}) self.response(to, "ok") @@ -460,22 +648,36 @@ def actionFileDelete(self, to, inner_path): ws.event("siteChanged", self.site, {"event": ["file_deleted", inner_path]}) # Find data in json files 
- def actionFileQuery(self, to, dir_inner_path, query): + def actionFileQuery(self, to, dir_inner_path, query=None): # s = time.time() dir_path = self.site.storage.getPath(dir_inner_path) - rows = list(QueryJson.query(dir_path, query)) + rows = list(QueryJson.query(dir_path, query or "")) # self.log.debug("FileQuery %s %s done in %s" % (dir_inner_path, query, time.time()-s)) return self.response(to, rows) + # List files in directory + def actionFileList(self, to, inner_path): + try: + return list(self.site.storage.walk(inner_path)) + except Exception as err: + return {"error": str(err)} + + # List directories in a directory + def actionDirList(self, to, inner_path): + try: + return list(self.site.storage.list(inner_path)) + except Exception as err: + return {"error": str(err)} + # Sql query def actionDbQuery(self, to, query, params=None, wait_for=None): - if config.debug: + if config.debug or config.verbose: s = time.time() rows = [] try: - assert query.strip().upper().startswith("SELECT"), "Only SELECT query supported" res = self.site.storage.query(query, params) except Exception, err: # Response the error to client + self.log.error("DbQuery error: %s" % err) return self.response(to, {"error": str(err)}) # Convert result to dict for row in res: @@ -485,22 +687,45 @@ def actionDbQuery(self, to, query, params=None, wait_for=None): return self.response(to, rows) # Return file content - def actionFileGet(self, to, inner_path, required=True): + def actionFileGet(self, to, inner_path, required=True, format="text", timeout=300): try: if required or inner_path in self.site.bad_files: - self.site.needFile(inner_path, priority=6) - body = self.site.storage.read(inner_path) + with gevent.Timeout(timeout): + self.site.needFile(inner_path, priority=6) + body = self.site.storage.read(inner_path, "rb") except Exception, err: - self.log.debug("%s fileGet error: %s" % (inner_path, err)) + self.log.error("%s fileGet error: %s" % (inner_path, err)) body = None - return 
self.response(to, body) + if body and format == "base64": + import base64 + body = base64.b64encode(body) + self.response(to, body) - def actionFileRules(self, to, inner_path): - rules = self.site.content_manager.getRules(inner_path) - if inner_path.endswith("content.json") and rules: + def actionFileNeed(self, to, inner_path, timeout=300): + try: + with gevent.Timeout(timeout): + self.site.needFile(inner_path, priority=6) + except Exception, err: + return self.response(to, {"error": str(err)}) + return self.response(to, "ok") + + def actionFileRules(self, to, inner_path, use_my_cert=False, content=None): + if not content: # No content defined by function call content = self.site.content_manager.contents.get(inner_path) + + if not content: # File not created yet + cert = self.user.getCert(self.site.address) + if cert and cert["auth_address"] in self.site.content_manager.getValidSigners(inner_path): + # Current selected cert if valid for this site, add it to query rules + content = {} + content["cert_auth_type"] = cert["auth_type"] + content["cert_user_id"] = self.user.getCertUserId(self.site.address) + content["cert_sign"] = cert["cert_sign"] + + rules = self.site.content_manager.getRules(inner_path, content) + if inner_path.endswith("content.json") and rules: if content: - rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in content["files"].values()]) + rules["current_size"] = len(json.dumps(content)) + sum([file["size"] for file in content.get("files", {}).values()]) else: rules["current_size"] = 0 return self.response(to, rules) @@ -512,21 +737,24 @@ def actionCertAdd(self, to, domain, auth_type, auth_user_name, cert): if res is True: self.cmd( "notification", - ["done", "New certificate added: %s/%s@%s." 
% (auth_type, auth_user_name, domain)] + ["done", _("{_[New certificate added]:} {auth_type}/{auth_user_name}@{domain}.")] ) + self.user.setCert(self.site.address, domain) + self.site.updateWebsocket(cert_changed=domain) self.response(to, "ok") elif res is False: # Display confirmation of change cert_current = self.user.certs[domain] - body = "You current certificate: %s/%s@%s" % (cert_current["auth_type"], cert_current["auth_user_name"], domain) + body = _("{_[Your current certificate]:} {cert_current[auth_type]}/{cert_current[auth_user_name]}@{domain}") self.cmd( "confirm", - [body, "Change it to %s/%s@%s" % (auth_type, auth_user_name, domain)], + [body, _("Change it to {auth_type}/{auth_user_name}@{domain}")], lambda (res): self.cbCertAddConfirm(to, domain, auth_type, auth_user_name, cert) ) else: self.response(to, "Not changed") except Exception, err: + self.log.error("CertAdd error: Exception - %s (%s)" % (err.message, Debug.formatException(err))) self.response(to, {"error": err.message}) def cbCertAddConfirm(self, to, domain, auth_type, auth_user_name, cert): @@ -534,34 +762,42 @@ def cbCertAddConfirm(self, to, domain, auth_type, auth_user_name, cert): self.user.addCert(self.user.getAuthAddress(self.site.address), domain, auth_type, auth_user_name, cert) self.cmd( "notification", - ["done", "Certificate changed to: %s/%s@%s." 
% (auth_type, auth_user_name, domain)] + ["done", _("Certificate changed to: {auth_type}/{auth_user_name}@{domain}.")] ) + self.user.setCert(self.site.address, domain) + self.site.updateWebsocket(cert_changed=domain) self.response(to, "ok") # Select certificate for site - def actionCertSelect(self, to, accepted_domains=[], accept_any=False): + def actionCertSelect(self, to, accepted_domains=[], accept_any=False, accepted_pattern=None): accounts = [] - accounts.append(["", "Unique to site", ""]) # Default option + accounts.append(["", _["No certificate"], ""]) # Default option active = "" # Make it active if no other option found # Add my certs auth_address = self.user.getAuthAddress(self.site.address) # Current auth address + site_data = self.user.getSiteData(self.site.address) # Current auth address + + if not accepted_domains and not accepted_pattern: # Accept any if no filter defined + accept_any = True + for domain, cert in self.user.certs.items(): - if auth_address == cert["auth_address"]: + if auth_address == cert["auth_address"] and domain == site_data.get("cert"): active = domain title = cert["auth_user_name"] + "@" + domain - if domain in accepted_domains or not accepted_domains or accept_any: + accepted_pattern_match = accepted_pattern and SafeRe.match(accepted_pattern, domain) + if domain in accepted_domains or accept_any or accepted_pattern_match: accounts.append([domain, title, ""]) else: accounts.append([domain, title, "disabled"]) # Render the html - body = "Select account you want to use in this site:" + body = "" + _["Select account you want to use in this site:"] + "" # Accounts for domain, account, css_class in accounts: if domain == active: css_class += " active" # Currently selected option - title = "%s (currently selected)" % account + title = _(u"%s ({_[currently selected]})") % account else: title = "%s" % account body += "%s" % (css_class, domain, title) @@ -571,25 +807,23 @@ def actionCertSelect(self, to, accepted_domains=[], 
accept_any=False): # body+= "Accepted authorization providers by the site:" body += "
    " for domain in more_domains: - body += """ - - Register »%s + body += _(u""" + + {_[Register]} »{domain} - """ % (domain, domain) + """) body += "
    " - body += """ - - """ + """ % self.next_message_id - # Send the notification - self.cmd("notification", ["ask", body]) + self.cmd("notification", ["ask", body], lambda domain: self.actionCertSet(to, domain)) + self.cmd("injectScript", script) # - Admin actions - @@ -597,25 +831,50 @@ def actionPermissionAdd(self, to, permission): if permission not in self.site.settings["permissions"]: self.site.settings["permissions"].append(permission) self.site.saveSettings() + self.site.updateWebsocket(permission_added=permission) self.response(to, "ok") def actionPermissionRemove(self, to, permission): self.site.settings["permissions"].remove(permission) self.site.saveSettings() + self.site.updateWebsocket(permission_removed=permission) self.response(to, "ok") + def actionPermissionDetails(self, to, permission): + if permission == "ADMIN": + self.response(to, _["Modify your client's configuration and access all site"] + " " + _["(Dangerous!)"] + "") + elif permission == "NOSANDBOX": + self.response(to, _["Modify your client's configuration and access all site"] + " " + _["(Dangerous!)"] + "") + else: + self.response(to, "") + # Set certificate that used for authenticate user for site def actionCertSet(self, to, domain): self.user.setCert(self.site.address, domain) self.site.updateWebsocket(cert_changed=domain) + self.response(to, "ok") + + # List user's certificates + def actionCertList(self, to): + back = [] + auth_address = self.user.getAuthAddress(self.site.address) + for domain, cert in self.user.certs.items(): + back.append({ + "auth_address": cert["auth_address"], + "auth_type": cert["auth_type"], + "auth_user_name": cert["auth_user_name"], + "domain": domain, + "selected": cert["auth_address"] == auth_address + }) + return back # List all site info - def actionSiteList(self, to): + def actionSiteList(self, to, connecting_sites=False): ret = [] SiteManager.site_manager.load() # Reload sites for site in self.server.sites.values(): - if not 
site.content_manager.contents.get("content.json"): - continue # Broken site + if not site.content_manager.contents.get("content.json") and not connecting_sites: + continue # Incomplete site ret.append(self.formatSiteInfo(site, create_user=False)) # Dont generate the auth_address on listing self.response(to, ret) @@ -629,16 +888,17 @@ def actionChannelJoinAllsite(self, to, channel): site.websockets.append(self) # Update site content.json - def actionSiteUpdate(self, to, address, check_files=False): + def actionSiteUpdate(self, to, address, check_files=False, since=None, announce=False): def updateThread(): - site.update(check_files=check_files) + site.update(announce=announce, check_files=check_files, since=since) self.response(to, "Updated") site = self.server.sites.get(address) - if not site.settings["serving"]: - site.settings["serving"] = True - site.saveSettings() if site and (site.address == self.site.address or "ADMIN" in self.site.settings["permissions"]): + if not site.settings["serving"]: + site.settings["serving"] = True + site.saveSettings() + gevent.spawn(updateThread) else: self.response(to, {"error": "Unknown site: %s" % address}) @@ -679,44 +939,232 @@ def actionSiteDelete(self, to, address): else: self.response(to, {"error": "Unknown site: %s" % address}) - def actionSiteClone(self, to, address): - self.cmd("notification", ["info", "Cloning site..."]) + def cbSiteClone(self, to, address, root_inner_path="", target_address=None): + self.cmd("notification", ["info", _["Cloning site..."]]) + site = self.server.sites.get(address) + if target_address: + target_site = self.server.sites.get(target_address) + privatekey = self.user.getSiteData(target_site.address).get("privatekey") + site.clone(target_address, privatekey, root_inner_path=root_inner_path) + self.cmd("notification", ["done", _["Site source code upgraded!"]]) + site.publish() + else: + # Generate a new site from user's bip32 seed + new_address, new_address_index, new_site_data = 
self.user.getNewSiteData() + new_site = site.clone(new_address, new_site_data["privatekey"], address_index=new_address_index, root_inner_path=root_inner_path) + new_site.settings["own"] = True + new_site.saveSettings() + self.cmd("notification", ["done", _["Site cloned"]]) + self.cmd("redirect", "/%s" % new_address) + gevent.spawn(new_site.announce) + return "ok" + + def actionSiteClone(self, to, address, root_inner_path="", target_address=None): + if not SiteManager.site_manager.isAddress(address): + self.response(to, {"error": "Not a site: %s" % address}) + return + + if not self.server.sites.get(address): + # Don't expose site existence + return + site = self.server.sites.get(address) - # Generate a new site from user's bip32 seed - new_address, new_address_index, new_site_data = self.user.getNewSiteData() - new_site = site.clone(new_address, new_site_data["privatekey"], address_index=new_address_index) - new_site.settings["own"] = True - new_site.saveSettings() - self.cmd("notification", ["done", "Site cloned" % new_address]) - gevent.spawn(new_site.announce) + if site.bad_files: + for bad_inner_path in site.bad_files.keys(): + is_user_file = "cert_signers" in site.content_manager.getRules(bad_inner_path) + if not is_user_file: + self.cmd("notification", ["error", _["Clone error: Site still in sync"]]) + return {"error": "Site still in sync"} + + if "ADMIN" in self.getPermissions(to): + self.cbSiteClone(to, address, root_inner_path, target_address) + else: + self.cmd( + "confirm", + [_["Clone site %s?"] % address, _["Clone"]], + lambda (res): self.cbSiteClone(to, address, root_inner_path, target_address) + ) def actionSiteSetLimit(self, to, size_limit): self.site.settings["size_limit"] = int(size_limit) self.site.saveSettings() - self.response(to, "Site size limit changed to %sMB" % size_limit) + self.response(to, "ok") + self.site.updateWebsocket() self.site.download(blind_includes=True) + def actionSiteAdd(self, to, address): + site_manager = 
SiteManager.site_manager + if address in site_manager.sites: + return {"error": "Site already added"} + else: + if site_manager.need(address): + return "ok" + else: + return {"error": "Invalid address"} + + def actionSiteListModifiedFiles(self, to, content_inner_path="content.json"): + content = self.site.content_manager.contents[content_inner_path] + min_mtime = content.get("modified", 0) + site_path = self.site.storage.directory + modified_files = [] + + # Load cache if not signed since last modified check + if content.get("modified", 0) < self.site.settings["cache"].get("time_modified_files_check"): + min_mtime = self.site.settings["cache"].get("time_modified_files_check") + modified_files = self.site.settings["cache"].get("modified_files", []) + + inner_paths = [content_inner_path] + content.get("includes", {}).keys() + content.get("files", {}).keys() + + for relative_inner_path in inner_paths: + inner_path = helper.getDirname(content_inner_path) + relative_inner_path + try: + is_mtime_newer = os.path.getmtime(self.site.storage.getPath(inner_path)) > min_mtime + 1 + if is_mtime_newer: + if inner_path.endswith("content.json"): + is_modified = self.site.content_manager.isModified(inner_path) + else: + previous_size = content["files"][inner_path]["size"] + is_same_size = self.site.storage.getSize(inner_path) == previous_size + ext = inner_path.rsplit(".", 1)[-1] + is_text_file = ext in ["json", "txt", "html", "js", "css"] + if is_same_size: + if is_text_file: + is_modified = self.site.content_manager.isModified(inner_path) # Check sha512 hash + else: + is_modified = False + else: + is_modified = True + + # Check ran, modified back to original value, but in the cache + if not is_modified and inner_path in modified_files: + modified_files.remove(inner_path) + else: + is_modified = False + except Exception as err: + if not self.site.storage.isFile(inner_path): # File deleted + is_modified = True + else: + raise err + if is_modified and inner_path not in 
modified_files: + modified_files.append(inner_path) + + self.site.settings["cache"]["time_modified_files_check"] = time.time() + self.site.settings["cache"]["modified_files"] = modified_files + return {"modified_files": modified_files} + + + def actionSiteSetSettingsValue(self, to, key, value): + if key not in ["modified_files_notification"]: + return {"error": "Can't change this key"} + + self.site.settings[key] = value + + return "ok" + + def actionUserGetSettings(self, to): + settings = self.user.sites.get(self.site.address, {}).get("settings", {}) + self.response(to, settings) + + def actionUserSetSettings(self, to, settings): + self.user.setSiteSettings(self.site.address, settings) + self.response(to, "ok") + + def actionUserGetGlobalSettings(self, to): + settings = self.user.settings + self.response(to, settings) + + def actionUserSetGlobalSettings(self, to, settings): + self.user.settings = settings + self.user.save() + self.response(to, "ok") + def actionServerUpdate(self, to): self.cmd("updating") sys.modules["main"].update_after_shutdown = True - if sys.modules["main"].file_server.tor_manager.tor_process: - sys.modules["main"].file_server.tor_manager.stopTor() + SiteManager.site_manager.save() sys.modules["main"].file_server.stop() sys.modules["main"].ui_server.stop() def actionServerPortcheck(self, to): - sys.modules["main"].file_server.port_opened = None - res = sys.modules["main"].file_server.openport() - self.response(to, res) + file_server = sys.modules["main"].file_server + file_server.portCheck() + self.response(to, file_server.port_opened) - def actionServerShutdown(self, to): + def actionServerShutdown(self, to, restart=False): + if restart: + sys.modules["main"].restart_after_shutdown = True sys.modules["main"].file_server.stop() sys.modules["main"].ui_server.stop() + def actionServerShowdirectory(self, to, directory="backup", inner_path=""): + if self.request.env["REMOTE_ADDR"] != "127.0.0.1": + return self.response(to, {"error": "Only clients 
from 127.0.0.1 allowed to run this command"}) + + import webbrowser + if directory == "backup": + path = os.path.abspath(config.data_dir) + elif directory == "log": + path = os.path.abspath(config.log_dir) + elif directory == "site": + path = os.path.abspath(self.site.storage.getPath(helper.getDirname(inner_path))) + + if os.path.isdir(path): + self.log.debug("Opening: %s" % path) + webbrowser.open('file://' + path) + return self.response(to, "ok") + else: + return self.response(to, {"error": "Not a directory"}) + def actionConfigSet(self, to, key, value): - if key not in ["tor"]: - self.response(to, {"error": "Forbidden"}) + if key not in config.keys_api_change_allowed: + self.response(to, {"error": "Forbidden you cannot set this config key"}) return + if key == "open_browser": + if value not in ["default_browser", "False"]: + self.response(to, {"error": "Forbidden: Invalid value"}) + return + + # Remove empty lines from lists + if type(value) is list: + value = [line for line in value if line] + config.saveValue(key, value) + + if key not in config.keys_restart_need: + if value is None: # Default value + setattr(config, key, config.parser.get_default(key)) + setattr(config.arguments, key, config.parser.get_default(key)) + else: + setattr(config, key, value) + setattr(config.arguments, key, value) + else: + config.need_restart = True + config.pending_changes[key] = value + + if key == "language": + import Translate + for translate in Translate.translates: + translate.setLanguage(value) + message = _["You have successfully changed the web interface's language!"] + "
    " + message += _["Due to the browser's caching, the full transformation could take some minute."] + self.cmd("notification", ["done", message, 10000]) + + if key == "tor_use_bridges": + if value is None: + value = False + else: + value = True + tor_manager = sys.modules["main"].file_server.tor_manager + tor_manager.request("SETCONF UseBridges=%i" % value) + + if key == "trackers_file": + config.loadTrackersFile() + + if key == "log_level": + logging.getLogger('').setLevel(logging.getLevelName(config.log_level)) + + if key == "ip_external": + gevent.spawn(sys.modules["main"].file_server.portCheck) + self.response(to, "ok") diff --git a/src/Ui/media/Fixbutton.coffee b/src/Ui/media/Fixbutton.coffee index 9e644a4e7..954d2b565 100644 --- a/src/Ui/media/Fixbutton.coffee +++ b/src/Ui/media/Fixbutton.coffee @@ -11,7 +11,7 @@ class Fixbutton return true $(".fixbutton-bg").stop().animate({"scale": 0.6}, 300, "easeOutCubic") $(".fixbutton-burger").stop().animate({"opacity": 0, "left": -20}, 300, "easeOutCubic") - $(".fixbutton-text").stop().animate({"opacity": 1, "left": 0}, 300, "easeOutBack") + $(".fixbutton-text").stop().animate({"opacity": 0.9, "left": 0}, 300, "easeOutBack") ###$(".fixbutton-bg").on "click", -> diff --git a/src/Ui/media/Infopanel.coffee b/src/Ui/media/Infopanel.coffee new file mode 100644 index 000000000..eb17eae7b --- /dev/null +++ b/src/Ui/media/Infopanel.coffee @@ -0,0 +1,50 @@ +class Infopanel + constructor: (@elem) -> + @visible = false + + show: (closed=false) => + @elem.addClass("visible") + if closed + @close() + else + @open() + + updateEvents: => + @elem.off("click") + @elem.find(".close").off("click") + + if @elem.hasClass("closed") + @elem.on "click", => + @onOpened() + @open() + else + @elem.find(".close").on "click", => + @onClosed() + @close() + + hide: => + @elem.removeClass("visible") + + close: => + @elem.addClass("closed") + @updateEvents() + return false + + open: => + @elem.removeClass("closed") + @updateEvents() + return false + 
+ setTitle: (line1, line2) => + @elem.find(".line-1").text(line1) + @elem.find(".line-2").text(line2) + + setClosedNum: (num) => + @elem.find(".closed-num").text(num) + + setAction: (title, func) => + @elem.find(".button").text(title).off("click").on("click", func) + + + +window.Infopanel = Infopanel diff --git a/src/Ui/media/Loading.coffee b/src/Ui/media/Loading.coffee index 76c88ba89..7cd2479d4 100644 --- a/src/Ui/media/Loading.coffee +++ b/src/Ui/media/Loading.coffee @@ -1,18 +1,18 @@ class Loading - constructor: -> + constructor: (@wrapper) -> if window.show_loadingscreen then @showScreen() @timer_hide = null - setProgress: (percent) -> if @timer_hide clearInterval @timer_hide - $(".progressbar").css("width", percent*100+"%").css("opacity", "1").css("display", "block") + RateLimit 200, -> + $(".progressbar").css("transform": "scaleX(#{parseInt(percent*100)/100})").css("opacity", "1").css("display", "block") hideProgress: -> console.log "hideProgress" @timer_hide = setTimeout ( => - $(".progressbar").css("width", "100%").css("opacity", "0").hideLater(1000) + $(".progressbar").css("transform": "scaleX(1)").css("opacity", "0").hideLater(1000) ), 300 @@ -25,14 +25,30 @@ class Loading showTooLarge: (site_info) -> if $(".console .button-setlimit").length == 0 # Not displaying it yet line = @printLine("Site size: #{parseInt(site_info.settings.size/1024/1024)}MB is larger than default allowed #{parseInt(site_info.size_limit)}MB", "warning") - button = $("Open site and set size limit to #{site_info.next_size_limit}MB") - button.on "click", (-> return window.wrapper.setSizeLimit(site_info.next_size_limit) ) + button = $("" + "Open site and set size limit to #{site_info.next_size_limit}MB" + "") + button.on "click", => + button.addClass("loading") + return @wrapper.setSizeLimit(site_info.next_size_limit) line.after(button) setTimeout (=> @printLine('Ready.') ), 100 - + showTrackerTorBridge: (server_info) -> + if $(".console .button-settrackerbridge").length == 0 and not 
server_info.tor_use_meek_bridges + line = @printLine("Tracker connection error detected.", "error") + button = $("" + "Use Tor meek bridges for tracker connections" + "") + button.on "click", => + button.addClass("loading") + @wrapper.ws.cmd "configSet", ["tor_use_bridges", ""] + @wrapper.ws.cmd "configSet", ["trackers_proxy", "tor"] + @wrapper.ws.cmd "siteUpdate", {address: @wrapper.site_info.address, announce: true} + @wrapper.reloadIframe() + return false + line.after(button) + if not server_info.tor_has_meek_bridges + button.addClass("disabled") + @printLine("No meek bridge support in your client, please download the latest bundle.", "warning") # We dont need loadingscreen anymore hideScreen: -> @@ -66,4 +82,4 @@ class Loading -window.Loading = Loading \ No newline at end of file +window.Loading = Loading diff --git a/src/Ui/media/Notifications.coffee b/src/Ui/media/Notifications.coffee index 1a7f94fa8..393d5a446 100644 --- a/src/Ui/media/Notifications.coffee +++ b/src/Ui/media/Notifications.coffee @@ -13,7 +13,7 @@ class Notifications add: (id, type, body, timeout=0) -> - id = id.replace /[^A-Za-z0-9]/g, "" + id = id.replace /[^A-Za-z0-9-]/g, "" # Close notifications with same id for elem in $(".notification-#{id}") @close $(elem) @@ -21,12 +21,16 @@ class Notifications # Create element elem = $(".notification.template", @elem).clone().removeClass("template") elem.addClass("notification-#{type}").addClass("notification-#{id}") + if type == "progress" + elem.addClass("notification-done") # Update text if type == "error" $(".notification-icon", elem).html("!") else if type == "done" $(".notification-icon", elem).html("
    ") + else if type == "progress" + $(".notification-icon", elem).html("
    ") else if type == "ask" $(".notification-icon", elem).html("?") else @@ -53,6 +57,7 @@ class Notifications elem.css({"width": "50px", "transform": "scale(0.01)"}) elem.animate({"scale": 1}, 800, "easeOutElastic") elem.animate({"width": width}, 700, "easeInOutCubic") + $(".body", elem).css("width": (width - 80)) $(".body", elem).cssLater("box-shadow", "0px 0px 5px rgba(0,0,0,0.1)", 1000) # Close button or Confirm button @@ -64,6 +69,13 @@ class Notifications $(".select", elem).on "click", => @close elem + # Input enter + $("input", elem).on "keyup", (e) => + if e.keyCode == 13 + @close elem + + return elem + close: (elem) -> elem.stop().animate {"width": 0, "opacity": 0}, 700, "easeInOutCubic" @@ -74,4 +86,4 @@ class Notifications console.log "[Notifications]", args... -window.Notifications = Notifications \ No newline at end of file +window.Notifications = Notifications diff --git a/src/Ui/media/Wrapper.coffee b/src/Ui/media/Wrapper.coffee index 1c771fe83..5d1f2d7df 100644 --- a/src/Ui/media/Wrapper.coffee +++ b/src/Ui/media/Wrapper.coffee @@ -2,8 +2,13 @@ class Wrapper constructor: (ws_url) -> @log "Created!" 
- @loading = new Loading() + @loading = new Loading(@) @notifications = new Notifications($(".notifications")) + @infopanel = new Infopanel($(".infopanel")) + @infopanel.onClosed = => + @ws.cmd("siteSetSettingsValue", ["modified_files_notification", false]) + @infopanel.onOpened = => + @ws.cmd("siteSetSettingsValue", ["modified_files_notification", true]) @fixbutton = new Fixbutton() window.addEventListener("message", @onMessageInner, false) @@ -16,7 +21,10 @@ class Wrapper @ws.connect() @ws_error = null # Ws error message + @next_cmd_message_id = -1 + @site_info = null # Hold latest site info + @server_info = null # Hold latest server info @event_site_info = $.Deferred() # Event when site_info received @inner_loaded = false # If iframe loaded or not @inner_ready = false # Inner frame ready to receive messages @@ -24,8 +32,11 @@ class Wrapper @site_error = null # Latest failed file download @address = null @opener_tested = false + @announcer_line = null + + @allowed_event_constructors = [window.MouseEvent, window.KeyboardEvent, window.PointerEvent] # Allowed event constructors - window.onload = @onLoad # On iframe loaded + window.onload = @onPageLoad # On iframe loaded window.onhashchange = (e) => # On hash change @log "Hashchange", window.location.hash if window.location.hash @@ -38,6 +49,16 @@ class Wrapper $("#inner-iframe").focus() + verifyEvent: (allowed_target, e) => + if not e.originalEvent.isTrusted + throw "Event not trusted" + + if e.originalEvent.constructor not in @allowed_event_constructors + throw "Invalid event constructor: #{e.constructor} not in #{JSON.stringify(@allowed_event_constructors)}" + + if e.originalEvent.currentTarget != allowed_target[0] + throw "Invalid event target: #{e.originalEvent.currentTarget} != #{allowed_target[0]}" + # Incoming message from UiServer websocket onMessageWebsocket: (e) => message = JSON.parse(e.data) @@ -49,12 +70,14 @@ class Wrapper @sendInner message # Pass message to inner frame else if cmd == "notification" # 
Display notification type = message.params[0] - id = "notification-#{message.id}" + id = "notification-ws-#{message.id}" if "-" in message.params[0] # - in first param: message id defined [id, type] = message.params[0].split("-") @notifications.add(id, type, message.params[1], message.params[2]) + else if cmd == "progress" # Display notification + @actionProgress(message) else if cmd == "prompt" # Prompt input - @displayPrompt message.params[0], message.params[1], message.params[2], (res) => + @displayPrompt message.params[0], message.params[1], message.params[2], message.params[3], (res) => @ws.response message.id, res else if cmd == "confirm" # Confirm action @displayConfirm message.params[0], message.params[1], (res) => @@ -64,11 +87,25 @@ class Wrapper if message.params.address == @address # Current page @setSiteInfo message.params @updateProgress message.params + else if cmd == "setAnnouncerInfo" + @sendInner message # Pass to inner frame + if message.params.address == @address # Current page + @setAnnouncerInfo message.params + @updateProgress message.params else if cmd == "error" @notifications.add("notification-#{message.id}", "error", message.params, 0) else if cmd == "updating" # Close connection @ws.ws.close() @ws.onCloseWebsocket(null, 4000) + else if cmd == "redirect" + window.top.location = message.params + else if cmd == "injectHtml" + $("body").append(message.params) + else if cmd == "injectScript" + script_tag = $(" +

    ZeroNet requires JavaScript support.

    If you use NoScript/Tor browser: Click on toolbar icon with the notification and choose "Temp. TRUSTED" for 127.0.0.1. + -// Dont allow site to load in a popup -/* -if (window.opener) document.write("Opener not allowed") -if (window.opener && document.execCommand) document.execCommand("Stop", false) -if (window.opener && window.stop) window.stop() -*/ +
    @@ -32,20 +39,29 @@
    -
    0
    +
    -
    ! Test notification×
    + +
    + 8 +
    + 8 modified files
    content.json, data.json +
    + Sign & Publish + × +
    + Config
    @@ -55,25 +71,29 @@ - + - - - + + diff --git a/src/User/User.py b/src/User/User.py index a81e8d8db..5ee2d81af 100644 --- a/src/User/User.py +++ b/src/User/User.py @@ -2,6 +2,9 @@ import json import time +import gevent + +import util from Crypt import CryptBitcoin from Plugin import PluginManager from Config import config @@ -22,10 +25,13 @@ def __init__(self, master_address=None, master_seed=None, data={}): self.master_address = CryptBitcoin.privatekeyToAddress(self.master_seed) self.sites = data.get("sites", {}) self.certs = data.get("certs", {}) + self.settings = data.get("settings", {}) + self.delayed_save_thread = None self.log = logging.getLogger("User:%s" % self.master_address) # Save to data/users.json + @util.Noparallel(queue=True, ignore_class=True) def save(self): s = time.time() users = json.load(open("%s/users.json" % config.data_dir)) @@ -36,35 +42,52 @@ def save(self): user_data["master_seed"] = self.master_seed user_data["sites"] = self.sites user_data["certs"] = self.certs + user_data["settings"] = self.settings helper.atomicWrite("%s/users.json" % config.data_dir, json.dumps(users, indent=2, sort_keys=True)) - self.log.debug("Saved in %.3fs" % (time.time()-s)) + self.log.debug("Saved in %.3fs" % (time.time() - s)) + self.delayed_save_thread = None + + def saveDelayed(self): + if not self.delayed_save_thread: + self.delayed_save_thread = gevent.spawn_later(5, self.save) def getAddressAuthIndex(self, address): return int(address.encode("hex"), 16) + @util.Noparallel() + def generateAuthAddress(self, address): + s = time.time() + address_id = self.getAddressAuthIndex(address) # Convert site address to int + auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id) + self.sites[address] = { + "auth_address": CryptBitcoin.privatekeyToAddress(auth_privatekey), + "auth_privatekey": auth_privatekey + } + self.saveDelayed() + self.log.debug("Added new site: %s in %.3fs" % (address, time.time() - s)) + return self.sites[address] + # Get 
user site data # Return: {"auth_address": "xxx", "auth_privatekey": "xxx"} def getSiteData(self, address, create=True): if address not in self.sites: # Generate new BIP32 child key based on site address if not create: return {"auth_address": None, "auth_privatekey": None} # Dont create user yet - s = time.time() - address_id = self.getAddressAuthIndex(address) # Convert site address to int - auth_privatekey = CryptBitcoin.hdPrivatekey(self.master_seed, address_id) - self.sites[address] = { - "auth_address": CryptBitcoin.privatekeyToAddress(auth_privatekey), - "auth_privatekey": auth_privatekey - } - self.save() - self.log.debug("Added new site: %s in %.3fs" % (address, time.time() - s)) + self.generateAuthAddress(address) return self.sites[address] def deleteSiteData(self, address): if address in self.sites: del(self.sites[address]) - self.save() + self.saveDelayed() self.log.debug("Deleted site: %s" % address) + def setSiteSettings(self, address, settings): + site_data = self.getSiteData(address) + site_data["settings"] = settings + self.saveDelayed() + return site_data + # Get data for a new, unique site # Return: [site_address, bip32_index, {"auth_address": "xxx", "auth_privatekey": "xxx", "privatekey": "xxx"}] def getNewSiteData(self): @@ -98,7 +121,6 @@ def getAuthPrivatekey(self, address, create=True): # Add cert for the user def addCert(self, auth_address, domain, auth_type, auth_user_name, cert_sign): - domain = domain.lower() # Find privatekey by auth address auth_privatekey = [site["auth_privatekey"] for site in self.sites.values() if site["auth_address"] == auth_address][0] cert_node = { @@ -130,7 +152,7 @@ def setCert(self, address, domain): else: if "cert" in site_data: del site_data["cert"] - self.save() + self.saveDelayed() return site_data # Get cert for the site address diff --git a/src/User/UserManager.py b/src/User/UserManager.py index dff7ece1f..66876dc17 100644 --- a/src/User/UserManager.py +++ b/src/User/UserManager.py @@ -1,6 +1,7 @@ # 
Included modules import json import logging +import time # ZeroNet Modules from User import User @@ -12,6 +13,7 @@ class UserManager(object): def __init__(self): self.users = {} + self.log = logging.getLogger("UserManager") # Load all user from data/users.json def load(self): @@ -20,6 +22,7 @@ def load(self): user_found = [] added = 0 + s = time.time() # Load new users for master_address, data in json.load(open("%s/users.json" % config.data_dir)).items(): if master_address not in self.users: @@ -32,19 +35,20 @@ def load(self): for master_address in self.users.keys(): if master_address not in user_found: del(self.users[master_address]) - logging.debug("Removed user: %s" % master_address) + self.log.debug("Removed user: %s" % master_address) if added: - logging.debug("UserManager added %s users" % added) + self.log.debug("Added %s users in %.3fs" % (added, time.time() - s)) # Create new user # Return: User def create(self, master_address=None, master_seed=None): + self.list() # Load the users if it's not loaded yet user = User(master_address, master_seed) - logging.debug("Created user: %s" % user.master_address) + self.log.debug("Created user: %s" % user.master_address) if user.master_address: # If successfully created self.users[user.master_address] = user - user.save() + user.saveDelayed() return user # List all users from data/users.json @@ -65,18 +69,3 @@ def get(self, master_address=None): user_manager = UserManager() # Singleton - - -# Debug: Reload User.py -def reloadModule(): - return "Not used" - - import imp - global User, UserManager, user_manager - User = imp.load_source("User", "src/User/User.py").User # Reload source - # module = imp.load_source("UserManager", "src/User/UserManager.py") # Reload module - # UserManager = module.UserManager - # user_manager = module.user_manager - # Reload users - user_manager = UserManager() - user_manager.load() diff --git a/src/Worker/Worker.py b/src/Worker/Worker.py index bdeb24318..399e1f326 100644 --- 
a/src/Worker/Worker.py +++ b/src/Worker/Worker.py @@ -28,59 +28,106 @@ def downloader(self): while self.running: # Try to pickup free file download task task = self.manager.getTask(self.peer) - if not task: # Die, no more task - self.manager.log.debug("%s: No task found, stopping" % self.key) - break + if not task: # No more task + time.sleep(0.1) # Wait a bit for new tasks + task = self.manager.getTask(self.peer) + if not task: # Still no task, stop it + self.manager.log.debug("%s: No task found, stopping" % self.key) + break if not task["time_started"]: task["time_started"] = time.time() # Task started now if task["workers_num"] > 0: # Wait a bit if someone already working on it + if task["peers"]: # It's an update + timeout = 3 + else: + timeout = 1 + + if task["size"] > 100 * 1024 * 1024: + timeout = timeout * 2 + if config.verbose: - self.manager.log.debug("%s: Someone already working on %s, sleeping 1 sec..." % (self.key, task["inner_path"])) - time.sleep(1) - if config.verbose: - self.manager.log.debug("%s: %s, task done after sleep: %s" % (self.key, task["inner_path"], task["done"])) + self.manager.log.debug("%s: Someone already working on %s (pri: %s), sleeping %s sec..." % ( + self.key, task["inner_path"], task["priority"], timeout + )) + + for sleep_i in range(1, timeout * 10): + time.sleep(0.1) + if task["done"] or task["workers_num"] == 0: + if config.verbose: + self.manager.log.debug("%s: %s, picked task free after %ss sleep. (done: %s)" % ( + self.key, task["inner_path"], 0.1 * sleep_i, task["done"] + )) + break + + if sleep_i % 10 == 0: + workers = self.manager.findWorkers(task) + if not workers or not workers[0].peer.connection: + break + worker_idle = time.time() - workers[0].peer.connection.last_recv_time + if worker_idle > 1: + if config.verbose: + self.manager.log.debug("%s: %s, worker %s seems idle, picked up task after %ss sleep. 
(done: %s)" % ( + self.key, task["inner_path"], workers[0].key, 0.1 * sleep_i, task["done"] + )) + break - if task["done"] is False: - self.task = task - site = task["site"] - task["workers_num"] += 1 + if task["done"]: + continue + + self.task = task + site = task["site"] + task["workers_num"] += 1 + try: + buff = self.peer.getFile(site.address, task["inner_path"], task["size"]) + except Exception, err: + self.manager.log.debug("%s: getFile error: %s" % (self.key, err)) + buff = None + if self.running is False: # Worker no longer needed or got killed + self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"])) + break + if task["done"] is True: # Task done, try to find new one + continue + if buff: # Download ok try: - buff = self.peer.getFile(site.address, task["inner_path"]) - except Exception, err: - self.manager.log.debug("%s: getFile error: %s" % (self.key, err)) - buff = None - if self.running is False: # Worker no longer needed or got killed - self.manager.log.debug("%s: No longer needed, returning: %s" % (self.key, task["inner_path"])) - break - if task["done"] is True: # Task done, try to find new one - continue - if buff: # Download ok correct = site.content_manager.verifyFile(task["inner_path"], buff) - else: # Download error + except Exception, err: correct = False - if correct is True or correct is None: # Hash ok or same file - self.manager.log.debug("%s: Hash correct: %s" % (self.key, task["inner_path"])) - if correct is True and task["done"] is False: # Save if changed and task not done yet - buff.seek(0) + else: # Download error + err = "Download failed" + correct = False + if correct is True or correct is None: # Verify ok or same file + self.manager.log.debug("%s: Verify correct: %s" % (self.key, task["inner_path"])) + write_error = None + if correct is True and task["done"] is False: # Save if changed and task not done yet + buff.seek(0) + try: site.storage.write(task["inner_path"], buff) - if task["done"] is 
False: + write_error = False + except Exception as err: + self.manager.log.error("%s: Error writing: %s (%s)" % (self.key, task["inner_path"], err)) + write_error = err + if task["done"] is False: + if write_error: + self.manager.failTask(task) + else: self.manager.doneTask(task) - task["workers_num"] -= 1 - self.task = None - else: # Hash failed - self.manager.log.debug( - "%s: Hash failed: %s, failed peers: %s" % - (self.key, task["inner_path"], len(task["failed"])) - ) - task["failed"].append(self.peer) - self.task = None - self.peer.hash_failed += 1 - if self.peer.hash_failed >= max(len(self.manager.tasks), 3) or self.peer.connection_error > 10: - # Broken peer: More fails than tasks number but atleast 3 - break - task["workers_num"] -= 1 - time.sleep(1) + task["workers_num"] -= 1 + else: # Verify failed + task["workers_num"] -= 1 + self.manager.log.debug( + "%s: Verify failed: %s, error: %s, failed peers: %s, workers: %s" % + (self.key, task["inner_path"], err, len(task["failed"]), task["workers_num"]) + ) + task["failed"].append(self.peer) + self.peer.hash_failed += 1 + if self.peer.hash_failed >= max(len(self.manager.tasks), 3) or self.peer.connection_error > 10: + # Broken peer: More fails than tasks number but atleast 3 + break + if task["inner_path"] not in site.bad_files: + # Don't need this file anymore + break + time.sleep(1) self.peer.onWorkerDone() self.running = False self.manager.removeWorker(self) diff --git a/src/Worker/WorkerManager.py b/src/Worker/WorkerManager.py index 90442bc39..e3cbfde7c 100644 --- a/src/Worker/WorkerManager.py +++ b/src/Worker/WorkerManager.py @@ -1,6 +1,5 @@ import time import logging -import random import collections import gevent @@ -38,6 +37,7 @@ def __repr__(self): def checkTasks(self): while self.running: tasks = task = worker = workers = None # Cleanup local variables + announced = False time.sleep(15) # Check every 15 sec # Clean up workers @@ -50,8 +50,7 @@ def checkTasks(self): tasks = self.tasks[:] # Copy it so 
removing elements wont cause any problem for task in tasks: - size_extra_time = task["size"] / (1024 * 100) # 1 second for every 100k - if task["time_started"] and time.time() >= task["time_started"] + 60 + size_extra_time: + if task["time_started"] and time.time() >= task["time_started"] + 60: self.log.debug("Timeout, Skipping: %s" % task) # Task taking too long time, skip it # Skip to next file workers workers = self.findWorkers(task) @@ -60,7 +59,8 @@ def checkTasks(self): worker.skip() else: self.failTask(task) - elif time.time() >= task["time_added"] + 60 + size_extra_time and not self.workers: # No workers left + + elif time.time() >= task["time_added"] + 60 and not self.workers: # No workers left self.log.debug("Timeout, Cleanup task: %s" % task) # Remove task self.failTask(task) @@ -69,36 +69,47 @@ def checkTasks(self): # Find more workers: Task started more than 15 sec ago or no workers workers = self.findWorkers(task) self.log.debug( - "Slow task: %s 15+%ss, (workers: %s, optional_hash_id: %s, peers: %s, failed: %s, asked: %s)" % + "Slow task: %s, (workers: %s, optional_hash_id: %s, peers: %s, failed: %s, asked: %s)" % ( - task["inner_path"], size_extra_time, len(workers), task["optional_hash_id"], + task["inner_path"], len(workers), task["optional_hash_id"], len(task["peers"] or []), len(task["failed"]), len(self.asked_peers) ) ) - task["site"].announce(mode="more") # Find more peers + if not announced: + task["site"].announce(mode="more") # Find more peers + announced = True if task["optional_hash_id"]: - if not task["time_started"]: - ask_limit = 20 - elif task["priority"] > 0: - ask_limit = max(10, time.time() - task["time_started"]) + if self.workers: + if not task["time_started"]: + ask_limit = 20 + else: + ask_limit = max(10, time.time() - task["time_started"]) + if len(self.asked_peers) < ask_limit and len(task["peers"] or []) <= len(task["failed"]) * 2: + # Re-search for high priority + self.startFindOptional(find_more=True) + if task["peers"]: + 
peers_try = [peer for peer in task["peers"] if peer not in task["failed"] and peer not in workers] + if peers_try: + self.startWorkers(peers_try, force_num=5) + else: + self.startFindOptional(find_more=True) else: - ask_limit = max(10, (time.time() - task["time_started"]) / 2) - if len(self.asked_peers) < ask_limit and len(task["peers"] or []) <= len(task["failed"]) * 2: - # Re-search for high priority self.startFindOptional(find_more=True) else: if task["peers"]: # Release the peer lock self.log.debug("Task peer lock release: %s" % task["inner_path"]) task["peers"] = [] - self.startWorkers() - break # One reannounce per loop + self.startWorkers() + + if len(self.tasks) > len(self.workers) * 2 and len(self.workers) < self.getMaxWorkers(): + self.startWorkers() self.log.debug("checkTasks stopped running") # Returns the next free or less worked task def getTask(self, peer): # Sort tasks by priority and worker numbers - self.tasks.sort(key=lambda task: task["priority"] - task["workers_num"] * 5, reverse=True) + self.tasks.sort(key=lambda task: task["priority"] - task["workers_num"] * 10, reverse=True) for task in self.tasks: # Find a task if task["peers"] and peer not in task["peers"]: @@ -109,12 +120,12 @@ def getTask(self, peer): continue # No peers found yet for the optional task return task - def removeGoodFileTasks(self): + def removeSolvedFileTasks(self, mark_as_good=True): for task in self.tasks[:]: if task["inner_path"] not in self.site.bad_files: - self.log.debug("No longer in bad_files, marking as good: %s" % task["inner_path"]) + self.log.debug("No longer in bad_files, marking as %s: %s" % (mark_as_good, task["inner_path"])) task["done"] = True - task["evt"].set(True) + task["evt"].set(mark_as_good) self.tasks.remove(task) if not self.tasks: self.started_task_num = 0 @@ -125,15 +136,19 @@ def onPeers(self): self.startWorkers() def getMaxWorkers(self): - if len(self.tasks) > 100: - return config.connected_limit * 2 + if len(self.tasks) > 50: + return 
config.workers * 3 else: - return config.connected_limit + return config.workers # Add new worker - def addWorker(self, peer): + def addWorker(self, peer, multiplexing=False, force=False): key = peer.key - if key not in self.workers and len(self.workers) < self.getMaxWorkers(): + if len(self.workers) > self.getMaxWorkers() and not force: + return False + if multiplexing: # Add even if we already have worker for this peer + key = "%s/%s" % (key, len(self.workers)) + if key not in self.workers: # We dont have worker for that peer and workers num less than max worker = Worker(self, peer) self.workers[key] = worker @@ -143,25 +158,46 @@ def addWorker(self, peer): else: # We have woker for this peer or its over the limit return False + def taskAddPeer(self, task, peer): + if task["peers"] is None: + task["peers"] = [] + if peer in task["failed"]: + return False + + if peer not in task["peers"]: + task["peers"].append(peer) + return True + # Start workers to process tasks - def startWorkers(self, peers=None): + def startWorkers(self, peers=None, force_num=0): if not self.tasks: return False # No task for workers - self.log.debug("Starting workers, tasks: %s, peers: %s, workers: %s" % (len(self.tasks), len(peers or []), len(self.workers))) if len(self.workers) >= self.getMaxWorkers() and not peers: return False # Workers number already maxed and no starting peers defined + self.log.debug( + "Starting workers, tasks: %s, peers: %s, workers: %s" % + (len(self.tasks), len(peers or []), len(self.workers)) + ) if not peers: peers = self.site.getConnectedPeers() if len(peers) < self.getMaxWorkers(): - peers += self.site.peers.values()[0:self.getMaxWorkers()] + peers += self.site.getRecentPeers(self.getMaxWorkers()) if type(peers) is set: peers = list(peers) - random.shuffle(peers) + # Sort by ping + peers.sort(key=lambda peer: peer.connection.last_ping_delay if peer.connection and len(peer.connection.waiting_requests) == 0 and peer.connection.connected else 9999) + for peer in 
peers: # One worker for every peer if peers and peer not in peers: continue # If peers defined and peer not valid - worker = self.addWorker(peer) + + if force_num: + worker = self.addWorker(peer, force=True) + force_num -= 1 + else: + worker = self.addWorker(peer) + if worker: self.log.debug("Added worker: %s, workers: %s/%s" % (peer.key, len(self.workers), self.getMaxWorkers())) @@ -177,13 +213,12 @@ def findOptionalTasks(self, optional_tasks, reset_task=False): for task in optional_tasks: optional_hash_id = task["optional_hash_id"] if optional_hash_id in hashfield_set: - found[optional_hash_id].append(peer) - if task["peers"] and peer not in task["peers"]: - task["peers"].append(peer) - else: - task["peers"] = [peer] if reset_task and len(task["failed"]) > 0: task["failed"] = [] + if peer in task["failed"]: + continue + if self.taskAddPeer(task, peer): + found[optional_hash_id].append(peer) return found @@ -214,16 +249,13 @@ def addOptionalPeers(self, found_ips): else: continue for peer_ip in peer_ips: - peer = self.site.addPeer(peer_ip[0], peer_ip[1], return_peer=True) + peer = self.site.addPeer(peer_ip[0], peer_ip[1], return_peer=True, source="optional") if not peer: continue - if task["peers"] is None: - task["peers"] = [] - if peer not in task["peers"]: - task["peers"].append(peer) + if self.taskAddPeer(task, peer): + found[hash_id].append(peer) if peer.hashfield.appendHashId(hash_id): # Peer has this file peer.time_hashfield = None # Peer hashfield probably outdated - found[hash_id].append(peer) return found @@ -233,7 +265,7 @@ def startFindOptional(self, reset_task=False, find_more=False, high_priority=Fal # Wait for more file requests if len(self.tasks) < 20 or high_priority: time.sleep(0.01) - if len(self.tasks) > 90: + elif len(self.tasks) > 90: time.sleep(5) else: time.sleep(0.5) @@ -252,7 +284,7 @@ def startFindOptional(self, reset_task=False, find_more=False, high_priority=Fal if found: found_peers = set([peer for peers in found.values() for peer in 
peers]) - self.startWorkers(found_peers) + self.startWorkers(found_peers, force_num=3) if len(found) < len(optional_hash_ids) or find_more or (high_priority and any(len(peers) < 10 for peers in found.itervalues())): self.log.debug("No local result for optional files: %s" % (optional_hash_ids - set(found))) @@ -263,8 +295,7 @@ def startFindOptional(self, reset_task=False, find_more=False, high_priority=Fal if not peers: peers = self.site.getConnectablePeers() for peer in peers: - if not peer.time_hashfield: - threads.append(gevent.spawn(peer.updateHashfield)) + threads.append(gevent.spawn(peer.updateHashfield, force=find_more)) gevent.joinall(threads, timeout=5) if time_tasks != self.time_task_added: # New task added since start @@ -278,23 +309,30 @@ def startFindOptional(self, reset_task=False, find_more=False, high_priority=Fal if found: found_peers = set([peer for hash_id_peers in found.values() for peer in hash_id_peers]) - self.startWorkers(found_peers) + self.startWorkers(found_peers, force_num=3) if len(found) < len(optional_hash_ids) or find_more: - self.log.debug("No connected hashtable result for optional files: %s" % (optional_hash_ids - set(found))) + self.log.debug( + "No connected hashtable result for optional files: %s (asked: %s)" % + (optional_hash_ids - set(found), len(self.asked_peers)) + ) + if not self.tasks: + self.log.debug("No tasks, stopping finding optional peers") + return # Try to query connected peers threads = [] - peers = [peer for peer in self.site.getConnectedPeers() if peer not in self.asked_peers] + peers = [peer for peer in self.site.getConnectedPeers() if peer.key not in self.asked_peers][0:10] if not peers: - peers = self.site.getConnectablePeers() + peers = self.site.getConnectablePeers(ignore=self.asked_peers) for peer in peers: threads.append(gevent.spawn(peer.findHashIds, list(optional_hash_ids))) - self.asked_peers.append(peer) + self.asked_peers.append(peer.key) for i in range(5): time.sleep(1) + thread_values = 
[thread.value for thread in threads if thread.value] if not thread_values: continue @@ -307,14 +345,17 @@ def startFindOptional(self, reset_task=False, find_more=False, high_priority=Fal if found: found_peers = set([peer for hash_id_peers in found.values() for peer in hash_id_peers]) - self.startWorkers(found_peers) + self.startWorkers(found_peers, force_num=3) if len(thread_values) == len(threads): # Got result from all started thread break if len(found) < len(optional_hash_ids): - self.log.debug("No findHash result, try random peers: %s" % (optional_hash_ids - set(found))) + self.log.debug( + "No findHash result, try random peers: %s (asked: %s)" % + (optional_hash_ids - set(found), len(self.asked_peers)) + ) # Try to query random peers if time_tasks != self.time_task_added: # New task added since start @@ -326,7 +367,7 @@ def startFindOptional(self, reset_task=False, find_more=False, high_priority=Fal for peer in peers: threads.append(gevent.spawn(peer.findHashIds, list(optional_hash_ids))) - self.asked_peers.append(peer) + self.asked_peers.append(peer.key) gevent.joinall(threads, timeout=15) @@ -336,11 +377,17 @@ def startFindOptional(self, reset_task=False, find_more=False, high_priority=Fal if found: found_peers = set([peer for hash_id_peers in found.values() for peer in hash_id_peers]) - self.startWorkers(found_peers) + self.startWorkers(found_peers, force_num=3) if len(found) < len(optional_hash_ids): self.log.debug("No findhash result for optional files: %s" % (optional_hash_ids - set(found))) + if time_tasks != self.time_task_added: # New task added since start + self.log.debug("New task since start, restarting...") + gevent.spawn_later(0.1, self.startFindOptional) + else: + self.log.debug("startFindOptional ended") + # Stop all worker def stopWorkers(self): for worker in self.workers.values(): @@ -364,12 +411,15 @@ def removeWorker(self, worker): del(self.workers[worker.key]) self.log.debug("Removed worker, workers: %s/%s" % (len(self.workers), 
self.getMaxWorkers())) if len(self.workers) <= self.getMaxWorkers() / 3 and len(self.asked_peers) < 10: - important_task = (task for task in self.tasks if task["priority"] > 0) - if next(important_task, None) or len(self.asked_peers) == 0: - self.startFindOptional(find_more=True) - else: - self.startFindOptional() - + optional_task = next((task for task in self.tasks if task["optional_hash_id"]), None) + if optional_task: + if len(self.workers) == 0: + self.startFindOptional(find_more=True) + else: + self.startFindOptional() + elif self.tasks and not self.workers and worker.task: + self.log.debug("Starting new workers... (tasks: %s)" % len(self.tasks)) + self.startWorkers() # Tasks sorted by this def getPriorityBoost(self, inner_path): @@ -379,23 +429,27 @@ def getPriorityBoost(self, inner_path): return 9998 # index.html also important if "-default" in inner_path: return -4 # Default files are cloning not important - elif inner_path.endswith(".css"): - return 5 # boost css files priority - elif inner_path.endswith(".js"): - return 4 # boost js files priority + elif inner_path.endswith("all.css"): + return 14 # boost css files priority + elif inner_path.endswith("all.js"): + return 13 # boost js files priority elif inner_path.endswith("dbschema.json"): - return 3 # boost database specification + return 12 # boost database specification elif inner_path.endswith("content.json"): return 1 # boost included content.json files priority a bit elif inner_path.endswith(".json"): - return 2 # boost data json files priority more + if len(inner_path) < 50: # Boost non-user json files + return 11 + else: + return 2 return 0 # Create new task and return asyncresult - def addTask(self, inner_path, peer=None, priority=0): + def addTask(self, inner_path, peer=None, priority=0, file_info=None): self.site.onFileStart(inner_path) # First task, trigger site download started task = self.findTask(inner_path) if task: # Already has task for that file + task["priority"] = max(priority, 
task["priority"]) if peer and task["peers"]: # This peer also has new version, add it to task possible peers task["peers"].append(peer) self.log.debug("Added peer %s to %s" % (peer.key, task["inner_path"])) @@ -404,17 +458,15 @@ def addTask(self, inner_path, peer=None, priority=0): task["failed"].remove(peer) # New update arrived, remove the peer from failed peers self.log.debug("Removed peer %s from failed %s" % (peer.key, task["inner_path"])) self.startWorkers([peer]) - - if priority: - task["priority"] += priority # Boost on priority - return task["evt"] + return task else: # No task for that file yet evt = gevent.event.AsyncResult() if peer: peers = [peer] # Only download from this peer else: peers = None - file_info = self.site.content_manager.getFileInfo(inner_path) + if not file_info: + file_info = self.site.content_manager.getFileInfo(inner_path) if file_info and file_info["optional"]: optional_hash_id = helper.toHashId(file_info["sha512"]) else: @@ -424,6 +476,10 @@ def addTask(self, inner_path, peer=None, priority=0): else: size = 0 priority += self.getPriorityBoost(inner_path) + + if self.started_task_num == 0: # Boost priority for first requested file + priority += 1 + task = { "evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "optional_hash_id": optional_hash_id, "time_added": time.time(), "time_started": None, @@ -449,7 +505,7 @@ def addTask(self, inner_path, peer=None, priority=0): else: self.startWorkers(peers) - return evt + return task # Find a task using inner_path def findTask(self, inner_path): @@ -462,7 +518,7 @@ def findTask(self, inner_path): def checkComplete(self): time.sleep(0.1) if not self.tasks: - self.log.debug("Check compelte: No tasks") + self.log.debug("Check complete: No tasks") self.onComplete() def onComplete(self): @@ -475,7 +531,10 @@ def doneTask(self, task): task["done"] = True self.tasks.remove(task) # Remove from queue if task["optional_hash_id"]: - self.log.debug("Downloaded 
optional file, adding to hashfield: %s" % task["inner_path"]) + self.log.debug( + "Downloaded optional file in %.3fs, adding to hashfield: %s" % + (time.time() - task["time_started"], task["inner_path"]) + ) self.site.content_manager.optionalDownloaded(task["inner_path"], task["optional_hash_id"], task["size"]) self.site.onFileDone(task["inner_path"]) task["evt"].set(True) diff --git a/src/lib/BitcoinECC/newBitcoinECC.py b/src/lib/BitcoinECC/newBitcoinECC.py index b09386bc5..65b648802 100644 --- a/src/lib/BitcoinECC/newBitcoinECC.py +++ b/src/lib/BitcoinECC/newBitcoinECC.py @@ -1,6 +1,7 @@ import random import hashlib import base64 +import math class GaussInt: def __init__(self,x,y,n,p=0): diff --git a/src/lib/PySocks/test/README b/src/lib/PySocks/test/README deleted file mode 100644 index e08608efe..000000000 --- a/src/lib/PySocks/test/README +++ /dev/null @@ -1,5 +0,0 @@ -Very rudimentary tests for Python 2 and Python 3. - -Requirements: tornado, twisted (available through pip) - -./test.sh diff --git a/src/lib/PySocks/test/httpproxy.py b/src/lib/PySocks/test/httpproxy.py deleted file mode 100644 index df0ad0312..000000000 --- a/src/lib/PySocks/test/httpproxy.py +++ /dev/null @@ -1,137 +0,0 @@ -#!/usr/bin/env python -# -# Simple asynchronous HTTP proxy with tunnelling (CONNECT). 
-# -# GET/POST proxying based on -# http://groups.google.com/group/python-tornado/msg/7bea08e7a049cf26 -# -# Copyright (C) 2012 Senko Rasic -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. 
- -import sys -import socket - -import tornado.httpserver -import tornado.ioloop -import tornado.iostream -import tornado.web -import tornado.httpclient - -__all__ = ['ProxyHandler', 'run_proxy'] - - -class ProxyHandler(tornado.web.RequestHandler): - SUPPORTED_METHODS = ['GET', 'POST', 'CONNECT'] - - @tornado.web.asynchronous - def get(self): - - def handle_response(response): - if response.error and not isinstance(response.error, - tornado.httpclient.HTTPError): - self.set_status(500) - self.write('Internal server error:\n' + str(response.error)) - self.finish() - else: - self.set_status(response.code) - for header in ('Date', 'Cache-Control', 'Server', - 'Content-Type', 'Location'): - v = response.headers.get(header) - if v: - self.set_header(header, v) - if response.body: - self.write(response.body) - self.finish() - - req = tornado.httpclient.HTTPRequest(url=self.request.uri, - method=self.request.method, body=self.request.body, - headers=self.request.headers, follow_redirects=False, - allow_nonstandard_methods=True) - - client = tornado.httpclient.AsyncHTTPClient() - try: - client.fetch(req, handle_response) - except tornado.httpclient.HTTPError as e: - if hasattr(e, 'response') and e.response: - self.handle_response(e.response) - else: - self.set_status(500) - self.write('Internal server error:\n' + str(e)) - self.finish() - - @tornado.web.asynchronous - def post(self): - return self.get() - - @tornado.web.asynchronous - def connect(self): - host, port = self.request.uri.split(':') - client = self.request.connection.stream - - def read_from_client(data): - upstream.write(data) - - def read_from_upstream(data): - client.write(data) - - def client_close(data=None): - if upstream.closed(): - return - if data: - upstream.write(data) - upstream.close() - - def upstream_close(data=None): - if client.closed(): - return - if data: - client.write(data) - client.close() - - def start_tunnel(): - client.read_until_close(client_close, read_from_client) - 
upstream.read_until_close(upstream_close, read_from_upstream) - client.write(b'HTTP/1.0 200 Connection established\r\n\r\n') - - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) - upstream = tornado.iostream.IOStream(s) - upstream.connect((host, int(port)), start_tunnel) - - -def run_proxy(port=8080, start_ioloop=True): - """ - Run proxy on the specified port. If start_ioloop is True (default), - the tornado IOLoop will be started immediately. - """ - app = tornado.web.Application([ - (r'.*', ProxyHandler), - ]) - app.listen(port, address="127.0.0.1") - ioloop = tornado.ioloop.IOLoop.instance() - if start_ioloop: - ioloop.start() - -if __name__ == '__main__': - port = 8081 - if len(sys.argv) > 1: - port = int(sys.argv[1]) - - print ("Running HTTP proxy server") - run_proxy(port) diff --git a/src/lib/PySocks/test/mocks b/src/lib/PySocks/test/mocks deleted file mode 100644 index 5299a3f49..000000000 Binary files a/src/lib/PySocks/test/mocks and /dev/null differ diff --git a/src/lib/PySocks/test/mocks.conf b/src/lib/PySocks/test/mocks.conf deleted file mode 100644 index ab5ef5903..000000000 --- a/src/lib/PySocks/test/mocks.conf +++ /dev/null @@ -1,104 +0,0 @@ -################################################# -# # -# Sample configuration file for MOCKS 0.0.2 # -# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # -# # -# I recommend reading the examples in this file # -# and then extending it to suite your needs. 
# -# # -################################################# - - - -######################### -# -# General daemon config -# ~~~~~~~~~~~~~~~~~~~~~ -# -######################### - -PORT = 1081 # Port MOCKS is to listen to -MOCKS_ADDR = 127.0.0.1 # IP adress MOCKS is to bind to -LOG_FILE = mocks.log # MOCKS log file -PID_FILE = mocks.pid # File holding MOCKS's process ID -BUFFER_SIZE = 65536 # Traffic buffer size in bytes -BACKLOG = 5 # Backlog for listen() -NEGOTIATION_TIMEOUT = 5 -CONNECTION_IDLE_TIMEOUT = 300 -BIND_TIMEOUT = 30 -SHUTDOWN_TIMEOUT = 3 -MAX_CONNECTIONS = 50 - - - -########################################################################## -# -# Client filter config -# ~~~~~~~~~~~~~~~~~~~~ -# -# Client filtering means sorting out which clients are allowed -# connection and which are not. This is basically done like this: -# MOCKS has a default behaviour regarding filtering client -# connections. This behaviour is called the 'policy' and can either -# be to ALLOW or to DENY the connection. After setting the policy -# you can specify a list of exceptions. The action MOCKS takes -# for a client matching any of these exceptions is the opposite -# of the policy (that is, if the policy is set to ALLOW the exceptions -# are denied and if the policy is set to DENY the exceptions are allowed). -# An exception is specified in the form ip_address/mask, where mask -# is optional and is an integer ranging from 0 to 32 identifying the -# number of common heading bits that ip_address and the client's IP -# address must have in order to yield a match. If mask is missing, -# 32 will be assumed. For instance, 192.168.1.0/24 will match any IP -# ranging from 192.168.1.1 to 192.168.1.255. -# -# Let's take two examples, one for each type of policy. Let's say we -# only want to allow IPs 10.12.0.0 through 10.12.255.255, 172.23.2.5 and -# 192.168.52.26 to use MOCKS. 
What we have to to is this: -# -# FILTER_POLICY = DENY -# FILTER_EXCEPTION = 10.12.0.0/16 -# FILTER_EXCEPTION = 172.23.2.5 # implied /32 -# FILTER_EXCEPTION = 192.168.52.26 # implied /32 -# -# Now, let's say this is a public proxy server, but for some reason -# you don't want to let any IP ranging from 192.168.1.1 to 192.168.1.255 -# and neither 10.2.5.13 to connect to it: -# -# FILTER_POLICY = ALLOW -# FILTER_EXCEPTION = 192.168.1.0/24 -# FILTER_EXCEPTION = 10.2.5.13 -# -########################################################################### - -FILTER_POLICY = ALLOW - - - -############################################################################# -# -# Upstream proxy config -# ~~~~~~~~~~~~~~~~~~~~~ -# -# You can choose to further relay traffic through another proxy server. -# MOCKS supports upstream HTTP CONNECT, SOCKS4 and SOCKS5 proxies. You -# must specify the proxy type (one of HTTPCONNECT, SOCKS4 or SOCKS5), the -# proxy address and the proxy port. Optionally you can specify an user -# name and a password used to authenicate to the upstream proxy. This is -# pretty straight forward, so let's just take an example. Let's say you -# want to use the HTTP CONNECT server at httpconnectproxy.com, on port 3128, -# using the username 'foo' and the password 'bar'. 
You do it like this: -# -# UP_PROXY_TYPE = HTTPCONNECT -# UP_PROXY_ADDR = httpconnectproxy.com -# UP_PROXY_PORT = 3128 -# UP_PROXY_USER = foo # These two can be missing if you -# UP_PROXY_PASSWD = bar # are not required to authenticate -# -############################################################################# - -# UP_PROXY_TYPE = HTTPCONNECT -# UP_PROXY_ADDR = 192.168.1.12 -# UP_PROXY_PORT = 3128 - - diff --git a/src/lib/PySocks/test/socks4server.py b/src/lib/PySocks/test/socks4server.py deleted file mode 100644 index 05a54b93a..000000000 --- a/src/lib/PySocks/test/socks4server.py +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env python -from twisted.internet import reactor -from twisted.protocols.socks import SOCKSv4Factory - -def run_proxy(): - reactor.listenTCP(1080, SOCKSv4Factory("/dev/null"), interface="127.0.0.1") - try: - reactor.run() - except (KeyboardInterrupt, SystemExit): - reactor.stop() - -if __name__ == "__main__": - print "Running SOCKS4 proxy server" - run_proxy() diff --git a/src/lib/PySocks/test/sockstest.py b/src/lib/PySocks/test/sockstest.py deleted file mode 100644 index 526cb3fc4..000000000 --- a/src/lib/PySocks/test/sockstest.py +++ /dev/null @@ -1,174 +0,0 @@ -import sys -sys.path.append("..") -import socks -import socket - -PY3K = sys.version_info[0] == 3 - -if PY3K: - import urllib.request as urllib2 -else: - import sockshandler - import urllib2 - -def raw_HTTP_request(): - req = "GET /ip HTTP/1.1\r\n" - req += "Host: ifconfig.me\r\n" - req += "User-Agent: Mozilla\r\n" - req += "Accept: text/html\r\n" - req += "\r\n" - return req.encode() - -def socket_HTTP_test(): - s = socks.socksocket() - s.set_proxy(socks.HTTP, "127.0.0.1", 8081) - s.connect(("ifconfig.me", 80)) - s.sendall(raw_HTTP_request()) - status = s.recv(2048).splitlines()[0] - assert status.startswith(b"HTTP/1.1 200") - -def socket_SOCKS4_test(): - s = socks.socksocket() - s.set_proxy(socks.SOCKS4, "127.0.0.1", 1080) - s.connect(("ifconfig.me", 80)) - 
s.sendall(raw_HTTP_request()) - status = s.recv(2048).splitlines()[0] - assert status.startswith(b"HTTP/1.1 200") - -def socket_SOCKS5_test(): - s = socks.socksocket() - s.set_proxy(socks.SOCKS5, "127.0.0.1", 1081) - s.connect(("ifconfig.me", 80)) - s.sendall(raw_HTTP_request()) - status = s.recv(2048).splitlines()[0] - assert status.startswith(b"HTTP/1.1 200") - -def SOCKS5_connect_timeout_test(): - s = socks.socksocket() - s.settimeout(0.0001) - s.set_proxy(socks.SOCKS5, "8.8.8.8", 80) - try: - s.connect(("ifconfig.me", 80)) - except socks.ProxyConnectionError as e: - assert str(e.socket_err) == "timed out" - -def SOCKS5_timeout_test(): - s = socks.socksocket() - s.settimeout(0.0001) - s.set_proxy(socks.SOCKS5, "127.0.0.1", 1081) - try: - s.connect(("ifconfig.me", 4444)) - except socks.GeneralProxyError as e: - assert str(e.socket_err) == "timed out" - - -def socket_SOCKS5_auth_test(): - # TODO: add support for this test. Will need a better SOCKS5 server. - s = socks.socksocket() - s.set_proxy(socks.SOCKS5, "127.0.0.1", 1081, username="a", password="b") - s.connect(("ifconfig.me", 80)) - s.sendall(raw_HTTP_request()) - status = s.recv(2048).splitlines()[0] - assert status.startswith(b"HTTP/1.1 200") - -def socket_HTTP_IP_test(): - s = socks.socksocket() - s.set_proxy(socks.HTTP, "127.0.0.1", 8081) - s.connect(("133.242.129.236", 80)) - s.sendall(raw_HTTP_request()) - status = s.recv(2048).splitlines()[0] - assert status.startswith(b"HTTP/1.1 200") - -def socket_SOCKS4_IP_test(): - s = socks.socksocket() - s.set_proxy(socks.SOCKS4, "127.0.0.1", 1080) - s.connect(("133.242.129.236", 80)) - s.sendall(raw_HTTP_request()) - status = s.recv(2048).splitlines()[0] - assert status.startswith(b"HTTP/1.1 200") - -def socket_SOCKS5_IP_test(): - s = socks.socksocket() - s.set_proxy(socks.SOCKS5, "127.0.0.1", 1081) - s.connect(("133.242.129.236", 80)) - s.sendall(raw_HTTP_request()) - status = s.recv(2048).splitlines()[0] - assert status.startswith(b"HTTP/1.1 200") - -def 
urllib2_HTTP_test(): - socks.set_default_proxy(socks.HTTP, "127.0.0.1", 8081) - socks.wrap_module(urllib2) - status = urllib2.urlopen("http://ifconfig.me/ip").getcode() - assert status == 200 - -def urllib2_SOCKS5_test(): - socks.set_default_proxy(socks.SOCKS5, "127.0.0.1", 1081) - socks.wrap_module(urllib2) - status = urllib2.urlopen("http://ifconfig.me/ip").getcode() - assert status == 200 - -def urllib2_handler_HTTP_test(): - opener = urllib2.build_opener(sockshandler.SocksiPyHandler(socks.HTTP, "127.0.0.1", 8081)) - status = opener.open("http://ifconfig.me/ip").getcode() - assert status == 200 - -def urllib2_handler_SOCKS5_test(): - opener = urllib2.build_opener(sockshandler.SocksiPyHandler(socks.SOCKS5, "127.0.0.1", 1081)) - status = opener.open("http://ifconfig.me/ip").getcode() - assert status == 200 - -def global_override_HTTP_test(): - socks.set_default_proxy(socks.HTTP, "127.0.0.1", 8081) - good = socket.socket - socket.socket = socks.socksocket - status = urllib2.urlopen("http://ifconfig.me/ip").getcode() - socket.socket = good - assert status == 200 - -def global_override_SOCKS5_test(): - default_proxy = (socks.SOCKS5, "127.0.0.1", 1081) - socks.set_default_proxy(*default_proxy) - good = socket.socket - socket.socket = socks.socksocket - status = urllib2.urlopen("http://ifconfig.me/ip").getcode() - socket.socket = good - assert status == 200 - assert socks.get_default_proxy()[1].decode() == default_proxy[1] - - -def main(): - print("Running tests...") - socket_HTTP_test() - print("1/12") - socket_SOCKS4_test() - print("2/12") - socket_SOCKS5_test() - print("3/12") - if not PY3K: - urllib2_handler_HTTP_test() - print("3.33/12") - urllib2_handler_SOCKS5_test() - print("3.66/12") - socket_HTTP_IP_test() - print("4/12") - socket_SOCKS4_IP_test() - print("5/12") - socket_SOCKS5_IP_test() - print("6/12") - SOCKS5_connect_timeout_test() - print("7/12") - SOCKS5_timeout_test() - print("8/12") - urllib2_HTTP_test() - print("9/12") - urllib2_SOCKS5_test() - 
print("10/12") - global_override_HTTP_test() - print("11/12") - global_override_SOCKS5_test() - print("12/12") - print("All tests ran successfully") - - -if __name__ == "__main__": - main() diff --git a/src/lib/PySocks/test/test.sh b/src/lib/PySocks/test/test.sh deleted file mode 100644 index 18479b9f5..000000000 --- a/src/lib/PySocks/test/test.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash -shopt -s expand_aliases -type python2 >/dev/null 2>&1 || alias python2='python' - -echo "Starting proxy servers..." -python2 socks4server.py > /dev/null & -python2 httpproxy.py > /dev/null & -./mocks start - -sleep 2 -echo "Python 2.6 tests" -python2.6 sockstest.py -exit - -sleep 2 -echo "Python 2.7 tests" -python2.7 sockstest.py - -sleep 2 -echo "Python 3.x tests" -python3 sockstest.py - -pkill python2 > /dev/null -./mocks shutdown -echo "Finished tests" diff --git a/src/lib/geventwebsocket/AUTHORS b/src/lib/geventwebsocket/AUTHORS new file mode 100644 index 000000000..02de7096e --- /dev/null +++ b/src/lib/geventwebsocket/AUTHORS @@ -0,0 +1,9 @@ +This Websocket library for Gevent is written and maintained by + + Jeffrey Gelens + + +Contributors: + + Denis Bilenko + Lon Ingram diff --git a/src/lib/geventwebsocket/LICENSE b/src/lib/geventwebsocket/LICENSE new file mode 100644 index 000000000..2526edb32 --- /dev/null +++ b/src/lib/geventwebsocket/LICENSE @@ -0,0 +1,13 @@ + Copyright 2011-2017 Jeffrey Gelens + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/src/lib/geventwebsocket/__init__.py b/src/lib/geventwebsocket/__init__.py index 7e2e0167d..5ee3f9610 100644 --- a/src/lib/geventwebsocket/__init__.py +++ b/src/lib/geventwebsocket/__init__.py @@ -1,4 +1,4 @@ -VERSION = (0, 9, 3, 'final', 0) +VERSION = (0, 10, 1, 'final', 0) __all__ = [ 'WebSocketApplication', diff --git a/src/lib/geventwebsocket/_compat.py b/src/lib/geventwebsocket/_compat.py new file mode 100644 index 000000000..70354135b --- /dev/null +++ b/src/lib/geventwebsocket/_compat.py @@ -0,0 +1,23 @@ +from __future__ import absolute_import, division, print_function + +import sys +import codecs + + +PY3 = sys.version_info[0] == 3 +PY2 = sys.version_info[0] == 2 + + +if PY2: + bytes = str + text_type = unicode + string_types = basestring + range_type = xrange + iteritems = lambda x: x.iteritems() + # b = lambda x: x +else: + text_type = str + string_types = str, + range_type = range + iteritems = lambda x: iter(x.items()) + # b = lambda x: codecs.latin_1_encode(x)[0] diff --git a/src/lib/geventwebsocket/handler.py b/src/lib/geventwebsocket/handler.py index c40b03c1a..8aec77c05 100644 --- a/src/lib/geventwebsocket/handler.py +++ b/src/lib/geventwebsocket/handler.py @@ -1,10 +1,8 @@ -# Modified: Werkzeug Debugger workaround in run_websocket(self): - import base64 import hashlib -import warnings from gevent.pywsgi import WSGIHandler +from ._compat import PY3 from .websocket import WebSocket, Stream from .logging import create_logger @@ -51,10 +49,7 @@ def run_websocket(self): try: self.server.clients[self.client_address] = Client( self.client_address, self.websocket) - if self.application.__class__.__name__ == "DebuggedApplication": # Modified: Werkzeug Debugger workaround (https://bitbucket.org/Jeffrey/gevent-websocket/issue/53/if-the-application-returns-a-generator-we) - list(self.application(self.environ, lambda s, h: [])) - else: - self.application(self.environ, lambda s, h: []) + list(self.application(self.environ, lambda s, h, e=None: [])) 
finally: del self.server.clients[self.client_address] if not self.websocket.closed: @@ -65,8 +60,7 @@ def run_websocket(self): self.websocket = None def run_application(self): - if (hasattr(self.server, 'pre_start_hook') - and self.server.pre_start_hook): + if (hasattr(self.server, 'pre_start_hook') and self.server.pre_start_hook): self.logger.debug("Calling pre-start hook") if self.server.pre_start_hook(self): return super(WebSocketHandler, self).run_application() @@ -126,7 +120,7 @@ def upgrade_websocket(self): if self.request_version != 'HTTP/1.1': self.start_response('402 Bad Request', []) - self.logger.warning("Bad server protocol in headers: %s" % self.request_version) + self.logger.warning("Bad server protocol in headers") return ['Bad protocol version'] @@ -217,11 +211,17 @@ def upgrade_connection(self): 'wsgi.websocket': self.websocket }) + if PY3: + accept = base64.b64encode( + hashlib.sha1((key + self.GUID).encode("latin-1")).digest() + ).decode("latin-1") + else: + accept = base64.b64encode(hashlib.sha1(key + self.GUID).digest()) + headers = [ ("Upgrade", "websocket"), ("Connection", "Upgrade"), - ("Sec-WebSocket-Accept", base64.b64encode( - hashlib.sha1(key + self.GUID).digest())), + ("Sec-WebSocket-Accept", accept) ] if protocol: @@ -238,7 +238,7 @@ def logger(self): return self.server.logger def log_request(self): - if '101' not in self.status: + if '101' not in str(self.status): self.logger.info(self.format_request()) @property diff --git a/src/lib/geventwebsocket/protocols/wamp.py b/src/lib/geventwebsocket/protocols/wamp.py index b55865378..c89775be9 100644 --- a/src/lib/geventwebsocket/protocols/wamp.py +++ b/src/lib/geventwebsocket/protocols/wamp.py @@ -11,6 +11,7 @@ except ImportError: import json +from .._compat import range_type, string_types from ..exceptions import WebSocketError from .base import BaseProtocol @@ -131,7 +132,7 @@ def __init__(self, *args, **kwargs): self.prefixes = Prefixes() self.session_id = ''.join( 
[random.choice(string.digits + string.letters) - for i in xrange(16)]) + for i in range_type(16)]) super(WampProtocol, self).__init__(*args, **kwargs) @@ -168,9 +169,9 @@ def rpc_call(self, data): call_id, curie_or_uri = data[1:3] args = data[3:] - if not isinstance(call_id, (str, unicode)): + if not isinstance(call_id, string_types): raise Exception() - if not isinstance(curie_or_uri, (str, unicode)): + if not isinstance(curie_or_uri, string_types): raise Exception() uri = self.prefixes.resolve(curie_or_uri) @@ -178,7 +179,7 @@ def rpc_call(self, data): try: result = self.procedures.call(uri, args) result_msg = [self.MSG_CALL_RESULT, call_id, result] - except Exception, e: + except Exception as e: result_msg = [self.MSG_CALL_ERROR, call_id] + self._get_exception_info(e) @@ -190,7 +191,7 @@ def pubsub_action(self, data): if not isinstance(action, int): raise Exception() - if not isinstance(curie_or_uri, (str, unicode)): + if not isinstance(curie_or_uri, string_types): raise Exception() uri = self.prefixes.resolve(curie_or_uri) diff --git a/src/lib/geventwebsocket/resource.py b/src/lib/geventwebsocket/resource.py index 36c1fb367..549f0d32d 100644 --- a/src/lib/geventwebsocket/resource.py +++ b/src/lib/geventwebsocket/resource.py @@ -1,8 +1,15 @@ import re +import warnings from .protocols.base import BaseProtocol from .exceptions import WebSocketError +try: + from collections import OrderedDict +except ImportError: + class OrderedDict: + pass + class WebSocketApplication(object): protocol_class = BaseProtocol @@ -41,15 +48,33 @@ class Resource(object): def __init__(self, apps=None): self.apps = apps if apps else [] - def _app_by_path(self, environ_path): - # Which app matched the current path? 
+ if isinstance(apps, dict): + if not isinstance(apps, OrderedDict): + warnings.warn("Using an unordered dictionary for the " + "app list is discouraged and may lead to " + "undefined behavior.", UserWarning) + + self.apps = apps.items() - for path, app in self.apps.iteritems(): + # An app can either be a standard WSGI application (an object we call with + # __call__(self, environ, start_response)) or a class we instantiate + # (and which can handle websockets). This function tells them apart. + # Override this if you have apps that can handle websockets but don't + # fulfill these criteria. + def _is_websocket_app(self, app): + return isinstance(app, type) and issubclass(app, WebSocketApplication) + + def _app_by_path(self, environ_path, is_websocket_request): + # Which app matched the current path? + for path, app in self.apps: if re.match(path, environ_path): - return app + if is_websocket_request == self._is_websocket_app(app): + return app + return None def app_protocol(self, path): - app = self._app_by_path(path) + # app_protocol will only be called for websocket apps + app = self._app_by_path(path, True) if hasattr(app, 'protocol_name'): return app.protocol_name() @@ -58,17 +83,18 @@ def app_protocol(self, path): def __call__(self, environ, start_response): environ = environ - current_app = self._app_by_path(environ['PATH_INFO']) + is_websocket_call = 'wsgi.websocket' in environ + current_app = self._app_by_path(environ['PATH_INFO'], is_websocket_call) if current_app is None: raise Exception("No apps defined") - if 'wsgi.websocket' in environ: + if is_websocket_call: ws = environ['wsgi.websocket'] current_app = current_app(ws) current_app.ws = ws # TODO: needed? 
current_app.handle() - - return None + # Always return something, calling WSGI middleware may rely on it + return [] else: return current_app(environ, start_response) diff --git a/src/lib/geventwebsocket/server.py b/src/lib/geventwebsocket/server.py index 00443b8a3..e939bd118 100644 --- a/src/lib/geventwebsocket/server.py +++ b/src/lib/geventwebsocket/server.py @@ -5,6 +5,7 @@ class WebSocketServer(WSGIServer): + handler_class = WebSocketHandler debug_log_format = ( '-' * 80 + '\n' + '%(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:\n' + @@ -18,7 +19,6 @@ def __init__(self, *args, **kwargs): self._logger = None self.clients = {} - kwargs['handler_class'] = WebSocketHandler super(WebSocketServer, self).__init__(*args, **kwargs) def handle(self, socket, address): diff --git a/src/lib/geventwebsocket/utf8validator.py b/src/lib/geventwebsocket/utf8validator.py index b8a3e8a5a..d604f9663 100644 --- a/src/lib/geventwebsocket/utf8validator.py +++ b/src/lib/geventwebsocket/utf8validator.py @@ -1,128 +1,224 @@ +from ._compat import PY3 + ############################################################################### -## -## Copyright 2011-2013 Tavendo GmbH -## -## Note: -## -## This code is a Python implementation of the algorithm -## -## "Flexible and Economical UTF-8 Decoder" -## -## by Bjoern Hoehrmann -## -## bjoern@hoehrmann.de -## http://bjoern.hoehrmann.de/utf-8/decoder/dfa/ -## -## Licensed under the Apache License, Version 2.0 (the "License"); -## you may not use this file except in compliance with the License. -## You may obtain a copy of the License at -## -## http://www.apache.org/licenses/LICENSE-2.0 -## -## Unless required by applicable law or agreed to in writing, software -## distributed under the License is distributed on an "AS IS" BASIS, -## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -## See the License for the specific language governing permissions and -## limitations under the License. 
-## +# +# The MIT License (MIT) +# +# Copyright (c) Crossbar.io Technologies GmbH +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +# THE SOFTWARE. 
+# ############################################################################### - -## use Cython implementation of UTF8 validator if available -## +# Note: This code is a Python implementation of the algorithm +# "Flexible and Economical UTF-8 Decoder" by Bjoern Hoehrmann +# bjoern@hoehrmann.de, http://bjoern.hoehrmann.de/utf-8/decoder/dfa/ + +__all__ = ("Utf8Validator",) + + +# DFA transitions +UTF8VALIDATOR_DFA = ( + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 00..1f + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 20..3f + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 40..5f + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, # 60..7f + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, # 80..9f + 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, # a0..bf + 8, 8, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, # c0..df + 0xa, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x3, 0x4, 0x3, 0x3, # e0..ef + 0xb, 0x6, 0x6, 0x6, 0x5, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, # f0..ff + 0x0, 0x1, 0x2, 0x3, 0x5, 0x8, 0x7, 0x1, 0x1, 0x1, 0x4, 0x6, 0x1, 0x1, 0x1, 0x1, # s0..s0 + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, # s1..s2 + 1, 2, 1, 1, 1, 1, 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, # s3..s4 + 1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, # s5..s6 + 1, 3, 1, 1, 1, 1, 1, 3, 1, 3, 1, 1, 1, 1, 1, 1, 1, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, # s7..s8 +) + +UTF8_ACCEPT = 0 +UTF8_REJECT = 1 + + +# use Cython implementation of UTF8 validator if available +# try: from 
wsaccel.utf8validator import Utf8Validator -except: - ## fallback to pure Python implementation - - class Utf8Validator: - """ - Incremental UTF-8 validator with constant memory consumption (minimal - state). - - Implements the algorithm "Flexible and Economical UTF-8 Decoder" by - Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/). - """ - - ## DFA transitions - UTF8VALIDATOR_DFA = [ - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 00..1f - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 20..3f - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 40..5f - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 60..7f - 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, # 80..9f - 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, # a0..bf - 8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, # c0..df - 0xa,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x4,0x3,0x3, # e0..ef - 0xb,0x6,0x6,0x6,0x5,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8, # f0..ff - 0x0,0x1,0x2,0x3,0x5,0x8,0x7,0x1,0x1,0x1,0x4,0x6,0x1,0x1,0x1,0x1, # s0..s0 - 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,1, # s1..s2 - 1,2,1,1,1,1,1,2,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1, # s3..s4 - 1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,3,1,3,1,1,1,1,1,1, # s5..s6 - 1,3,1,1,1,1,1,3,1,3,1,1,1,1,1,1,1,3,1,1,1,1,1,1,1,1,1,1,1,1,1,1, # s7..s8 - ] - - UTF8_ACCEPT = 0 - UTF8_REJECT = 1 - - def __init__(self): - self.reset() - - def decode(self, b): - """ - Eat one UTF-8 octet, and validate on the fly. - Returns UTF8_ACCEPT when enough octets have been consumed, in which case - self.codepoint contains the decoded Unicode code point. +except ImportError: + # + # Fallback to pure Python implementation - also for PyPy. + # + # Do NOT touch this code unless you know what you are doing! 
+ # https://github.com/oberstet/scratchbox/tree/master/python/utf8 + # - Returns UTF8_REJECT when invalid UTF-8 was encountered. + if PY3: - Returns some other positive integer when more octets need to be eaten. - """ - type = Utf8Validator.UTF8VALIDATOR_DFA[b] + # Python 3 and above - if self.state != Utf8Validator.UTF8_ACCEPT: - self.codepoint = (b & 0x3f) | (self.codepoint << 6) - else: - self.codepoint = (0xff >> type) & b + # convert DFA table to bytes (performance) + UTF8VALIDATOR_DFA_S = bytes(UTF8VALIDATOR_DFA) - self.state = Utf8Validator.UTF8VALIDATOR_DFA[256 + self.state * 16 + type] - - return self.state - - def reset(self): - """ - Reset validator to start new incremental UTF-8 decode/validation. + class Utf8Validator(object): """ - self.state = Utf8Validator.UTF8_ACCEPT - self.codepoint = 0 - self.i = 0 + Incremental UTF-8 validator with constant memory consumption (minimal state). - def validate(self, ba): + Implements the algorithm "Flexible and Economical UTF-8 Decoder" by + Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/). """ - Incrementally validate a chunk of bytes provided as string. - Will return a quad (valid?, endsOnCodePoint?, currentIndex, totalIndex). - - As soon as an octet is encountered which renders the octet sequence - invalid, a quad with valid? == False is returned. currentIndex returns - the index within the currently consumed chunk, and totalIndex the - index within the total consumed sequence that was the point of bail out. - When valid? == True, currentIndex will be len(ba) and totalIndex the - total amount of consumed bytes. + def __init__(self): + self.reset() + + def decode(self, b): + """ + Eat one UTF-8 octet, and validate on the fly. + + Returns ``UTF8_ACCEPT`` when enough octets have been consumed, in which case + ``self.codepoint`` contains the decoded Unicode code point. + + Returns ``UTF8_REJECT`` when invalid UTF-8 was encountered. 
+ + Returns some other positive integer when more octets need to be eaten. + """ + tt = UTF8VALIDATOR_DFA_S[b] + if self.state != UTF8_ACCEPT: + self.codepoint = (b & 0x3f) | (self.codepoint << 6) + else: + self.codepoint = (0xff >> tt) & b + self.state = UTF8VALIDATOR_DFA_S[256 + self.state * 16 + tt] + return self.state + + def reset(self): + """ + Reset validator to start new incremental UTF-8 decode/validation. + """ + self.state = UTF8_ACCEPT # the empty string is valid UTF8 + self.codepoint = 0 + self.i = 0 + + def validate(self, ba): + """ + Incrementally validate a chunk of bytes provided as string. + + Will return a quad ``(valid?, endsOnCodePoint?, currentIndex, totalIndex)``. + + As soon as an octet is encountered which renders the octet sequence + invalid, a quad with ``valid? == False`` is returned. ``currentIndex`` returns + the index within the currently consumed chunk, and ``totalIndex`` the + index within the total consumed sequence that was the point of bail out. + When ``valid? == True``, currentIndex will be ``len(ba)`` and ``totalIndex`` the + total amount of consumed bytes. + """ + # + # The code here is written for optimal JITting in PyPy, not for best + # readability by your grandma or particular elegance. Do NOT touch! + # + l = len(ba) + i = 0 + state = self.state + while i < l: + # optimized version of decode(), since we are not interested in actual code points + state = UTF8VALIDATOR_DFA_S[256 + (state << 4) + UTF8VALIDATOR_DFA_S[ba[i]]] + if state == UTF8_REJECT: + self.state = state + self.i += i + return False, False, i, self.i + i += 1 + self.state = state + self.i += l + return True, state == UTF8_ACCEPT, l, self.i + + else: + + # convert DFA table to string (performance) + UTF8VALIDATOR_DFA_S = ''.join([chr(c) for c in UTF8VALIDATOR_DFA]) + + class Utf8Validator(object): """ + Incremental UTF-8 validator with constant memory consumption (minimal state). 
- l = len(ba) - - for i in xrange(l): - ## optimized version of decode(), since we are not interested in actual code points - - self.state = Utf8Validator.UTF8VALIDATOR_DFA[256 + (self.state << 4) + Utf8Validator.UTF8VALIDATOR_DFA[ord(ba[i])]] - - if self.state == Utf8Validator.UTF8_REJECT: - self.i += i - return False, False, i, self.i - - self.i += l + Implements the algorithm "Flexible and Economical UTF-8 Decoder" by + Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/). + """ - return True, self.state == Utf8Validator.UTF8_ACCEPT, l, self.i + def __init__(self): + self.reset() + + def decode(self, b): + """ + Eat one UTF-8 octet, and validate on the fly. + + Returns ``UTF8_ACCEPT`` when enough octets have been consumed, in which case + ``self.codepoint`` contains the decoded Unicode code point. + + Returns ``UTF8_REJECT`` when invalid UTF-8 was encountered. + + Returns some other positive integer when more octets need to be eaten. + """ + tt = ord(UTF8VALIDATOR_DFA_S[b]) + if self.state != UTF8_ACCEPT: + self.codepoint = (b & 0x3f) | (self.codepoint << 6) + else: + self.codepoint = (0xff >> tt) & b + self.state = ord(UTF8VALIDATOR_DFA_S[256 + self.state * 16 + tt]) + return self.state + + def reset(self): + """ + Reset validator to start new incremental UTF-8 decode/validation. + """ + self.state = UTF8_ACCEPT # the empty string is valid UTF8 + self.codepoint = 0 + self.i = 0 + + def validate(self, ba): + """ + Incrementally validate a chunk of bytes provided as string. + + Will return a quad ``(valid?, endsOnCodePoint?, currentIndex, totalIndex)``. + + As soon as an octet is encountered which renders the octet sequence + invalid, a quad with ``valid? == False`` is returned. ``currentIndex`` returns + the index within the currently consumed chunk, and ``totalIndex`` the + index within the total consumed sequence that was the point of bail out. + When ``valid? 
== True``, currentIndex will be ``len(ba)`` and ``totalIndex`` the + total amount of consumed bytes. + """ + # + # The code here is written for optimal JITting in PyPy, not for best + # readability by your grandma or particular elegance. Do NOT touch! + # + l = len(ba) + i = 0 + state = self.state + while i < l: + # optimized version of decode(), since we are not interested in actual code points + try: + state = ord(UTF8VALIDATOR_DFA_S[256 + (state << 4) + ord(UTF8VALIDATOR_DFA_S[ba[i]])]) + except: + import ipdb; ipdb.set_trace() + if state == UTF8_REJECT: + self.state = state + self.i += i + return False, False, i, self.i + i += 1 + self.state = state + self.i += l + return True, state == UTF8_ACCEPT, l, self.i diff --git a/src/lib/geventwebsocket/websocket.py b/src/lib/geventwebsocket/websocket.py index 6d4f76d32..7aad7698e 100644 --- a/src/lib/geventwebsocket/websocket.py +++ b/src/lib/geventwebsocket/websocket.py @@ -1,11 +1,10 @@ import struct +import socket -from socket import error - +from ._compat import string_types, range_type, text_type from .exceptions import ProtocolError from .exceptions import WebSocketError from .exceptions import FrameTooLargeException - from .utf8validator import Utf8Validator @@ -62,7 +61,7 @@ def _decode_bytes(self, bytestring): """ if not bytestring: - return u'' + return '' try: return bytestring.decode('utf-8') @@ -76,13 +75,10 @@ def _encode_bytes(self, text): :returns: The utf-8 byte string equivalent of `text`. 
""" - if isinstance(text, str): - return text + if not isinstance(text, str): + text = text_type(text or '') - if not isinstance(text, unicode): - text = unicode(text or '') - - return text.encode('utf-8') + return text.encode("utf-8") def _is_valid_close_code(self, code): """ @@ -166,7 +162,7 @@ def handle_close(self, header, payload): raise ProtocolError('Invalid close frame: {0} {1}'.format( header, payload)) - code = struct.unpack('!H', str(payload[:2]))[0] + code = struct.unpack('!H', payload[:2])[0] payload = payload[2:] if payload: @@ -203,15 +199,15 @@ def read_frame(self): raise ProtocolError if not header.length: - return header, '' + return header, b'' try: payload = self.raw_read(header.length) - except error: - payload = '' + except socket.error: + payload = b'' except Exception: # TODO log out this exception - payload = '' + payload = b'' if len(payload) != header.length: raise WebSocketError('Unexpected EOF reading frame payload') @@ -238,7 +234,7 @@ def read_message(self): if an exception is called. Use `receive` instead. 
""" opcode = None - message = "" + message = bytearray() while True: header, payload = self.read_frame() @@ -286,9 +282,9 @@ def read_message(self): if opcode == self.OPCODE_TEXT: self.validate_utf8(message) - return message + return self._decode_bytes(message) else: - return bytearray(message) + return message def receive(self): """ @@ -306,7 +302,10 @@ def receive(self): self.close(1007) except ProtocolError: self.close(1002) - except error: + except socket.timeout: + self.close() + self.current_app.on_close(MSG_CLOSED) + except socket.error: self.close() self.current_app.on_close(MSG_CLOSED) @@ -320,24 +319,29 @@ def send_frame(self, message, opcode): self.current_app.on_close(MSG_ALREADY_CLOSED) raise WebSocketError(MSG_ALREADY_CLOSED) - if opcode == self.OPCODE_TEXT: + if not message: + return + + if opcode in (self.OPCODE_TEXT, self.OPCODE_PING): message = self._encode_bytes(message) elif opcode == self.OPCODE_BINARY: - message = str(message) + message = bytes(message) - header = Header.encode_header(True, opcode, '', len(message), 0) + header = Header.encode_header(True, opcode, b'', len(message), 0) try: self.raw_write(header + message) - except error: + except socket.error: raise WebSocketError(MSG_SOCKET_DEAD) + except: + raise def send(self, message, binary=None): """ Send a frame over the websocket with message as its payload """ if binary is None: - binary = not isinstance(message, (str, unicode)) + binary = not isinstance(message, string_types) opcode = self.OPCODE_BINARY if binary else self.OPCODE_TEXT @@ -347,7 +351,7 @@ def send(self, message, binary=None): self.current_app.on_close(MSG_SOCKET_DEAD) raise WebSocketError(MSG_SOCKET_DEAD) - def close(self, code=1000, message=''): + def close(self, code=1000, message=b''): """ Close the websocket and connection, sending the specified code and message. 
The underlying socket object is _not_ closed, that is the @@ -360,9 +364,7 @@ def close(self, code=1000, message=''): try: message = self._encode_bytes(message) - self.send_frame( - struct.pack('!H%ds' % len(message), code, message), - opcode=self.OPCODE_CLOSE) + self.send_frame(message, opcode=self.OPCODE_CLOSE) except WebSocketError: # Failed to write the closing frame but it's ok because we're # closing the socket anyway. @@ -420,18 +422,37 @@ def mask_payload(self, payload): payload = bytearray(payload) mask = bytearray(self.mask) - for i in xrange(self.length): + for i in range_type(self.length): payload[i] ^= mask[i % 4] - return str(payload) + return payload # it's the same operation unmask_payload = mask_payload def __repr__(self): - return ("
    ").format(self.fin, self.opcode, self.length, - self.flags, id(self)) + opcodes = { + 0: 'continuation(0)', + 1: 'text(1)', + 2: 'binary(2)', + 8: 'close(8)', + 9: 'ping(9)', + 10: 'pong(10)' + } + flags = { + 0x40: 'RSV1 MASK', + 0x20: 'RSV2 MASK', + 0x10: 'RSV3 MASK' + } + + return ("
    ").format( + self.fin, + opcodes.get(self.opcode, 'reserved({})'.format(self.opcode)), + self.length, + flags.get(self.flags, 'reserved({})'.format(self.flags)), + self.mask, id(self) + ) @classmethod def decode_header(cls, stream): @@ -509,7 +530,8 @@ def encode_header(cls, fin, opcode, mask, length, flags): """ first_byte = opcode second_byte = 0 - extra = '' + extra = b"" + result = bytearray() if fin: first_byte |= cls.FIN_MASK @@ -538,6 +560,11 @@ def encode_header(cls, fin, opcode, mask, length, flags): if mask: second_byte |= cls.MASK_MASK - extra += mask + result.append(first_byte) + result.append(second_byte) + result.extend(extra) + + if mask: + result.extend(mask) - return chr(first_byte) + chr(second_byte) + extra + return result diff --git a/src/lib/merkletools/LICENSE b/src/lib/merkletools/LICENSE new file mode 100644 index 000000000..aea5b275c --- /dev/null +++ b/src/lib/merkletools/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Tierion + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/src/lib/merkletools/README.md b/src/lib/merkletools/README.md new file mode 100644 index 000000000..6a160ad46 --- /dev/null +++ b/src/lib/merkletools/README.md @@ -0,0 +1,178 @@ +# pymerkletools +[![PyPI version](https://badge.fury.io/py/merkletools.svg)](https://badge.fury.io/py/merkletools) [![Build Status](https://travis-ci.org/Tierion/pymerkletools.svg?branch=master)](https://travis-ci.org/Tierion/pymerkletools) + +This is a Python port of [merkle-tools](https://github.com/tierion/merkle-tools). + +Tools for creating Merkle trees, generating merkle proofs, and verification of merkle proofs. + +## Installation + +``` +pip install merkletools +``` + +### Create MerkleTools Object + +```python +import merkletools + +mt = MerkleTools(hash_type="md5") # default is sha256 +# valid hashTypes include all crypto hash algorithms +# such as 'MD5', 'SHA1', 'SHA224', 'SHA256', 'SHA384', 'SHA512' +# as well as the SHA3 family of algorithms +# including 'SHA3-224', 'SHA3-256', 'SHA3-384', and 'SHA3-512' +``` + +To use `sha3`, this module depends on [pysha3](https://pypi.python.org/pypi/pysha3). It will be installed as part of this module or you can install it manually with : +```bash +pip install pysha3==1.0b1 +``` + + +## Methods + +### add_leaf(value, do_hash) + +Adds a value as a leaf or a list of leafs to the tree. The value must be a hex string, otherwise set the optional `do_hash` to true to have your value hashed prior to being added to the tree. 
+ +```python +hex_data = '05ae04314577b2783b4be98211d1b72476c59e9c413cfb2afa2f0c68e0d93911' +list_data = ['Some text data', 'perhaps'] + +mt.add_leaf(hexData) +mt.add_leaf(otherData, True) +``` + +### get_leaf_count() + +Returns the number of leaves that are currently added to the tree. + +```python +leaf_count = mt.get_leaf_count(); +``` + +### get_leaf(index) + +Returns the value of the leaf at the given index as a hex string. + +```python +leaf_value = mt.get_leaf(1) +``` + +### reset_tree() + +Removes all the leaves from the tree, prepararing to to begin creating a new tree. + +```python +mt.reset_tree() +``` + +### make_tree() + +Generates the merkle tree using the leaves that have been added. + +```python +mt.make_tree(); +``` + +### is_ready + +`.is_ready` is a boolean property indicating if the tree is built and ready to supply its root and proofs. The `is_ready` state is `True` only after calling 'make_tree()'. Adding leaves or resetting the tree will change the ready state to False. + +```python +is_ready = mt.is_ready +``` + +### get_merkle_root() + +Returns the merkle root of the tree as a hex string. If the tree is not ready, `None` is returned. + +```python +root_value = mt.get_merkle_root(); +``` + +### get_proof(index) + +Returns the proof as an array of hash objects for the leaf at the given index. If the tree is not ready or no leaf exists at the given index, null is returned. + +```python +proof = mt.get_proof(1) +``` + +The proof array contains a set of merkle sibling objects. Each object contains the sibling hash, with the key value of either right or left. The right or left value tells you where that sibling was in relation to the current hash being evaluated. This information is needed for proof validation, as explained in the following section. + +### validate_proof(proof, target_hash, merkle_root) + +Returns a boolean indicating whether or not the proof is valid and correctly connects the `target_hash` to the `merkle_root`. 
`proof` is a proof array as supplied by the `get_proof` method. The `target_hash` and `merkle_root` parameters must be a hex strings. + +```python +proof = [ + { right: '09096dbc49b7909917e13b795ebf289ace50b870440f10424af8845fb7761ea5' }, + { right: 'ed2456914e48c1e17b7bd922177291ef8b7f553edf1b1f66b6fc1a076524b22f' }, + { left: 'eac53dde9661daf47a428efea28c81a021c06d64f98eeabbdcff442d992153a8' }, +] +target_hash = '36e0fd847d927d68475f32a94efff30812ee3ce87c7752973f4dd7476aa2e97e' +merkle_root = 'b8b1f39aa2e3fc2dde37f3df04e829f514fb98369b522bfb35c663befa896766' + +is_valid = mt.validate_proof(proof, targetHash, merkleRoot) +``` + +The proof process uses all the proof objects in the array to attempt to prove a relationship between the `target_hash` and the `merkle_root` values. The steps to validate a proof are: + +1. Concatenate `target_hash` and the first hash in the proof array. The right or left designation specifies which side of the concatenation that the proof hash value should be on. +2. Hash the resulting value. +3. Concatenate the resulting hash with the next hash in the proof array, using the same left and right rules. +4. Hash that value and continue the process until you’ve gone through each item in the proof array. +5. The final hash value should equal the `merkle_root` value if the proof is valid, otherwise the proof is invalid. 
+ +## Common Usage + +### Creating a tree and generating the proofs + +```python +mt = MerkleTools() + +mt.add_leaf("tierion", True) +mt.add_leaf(["bitcoin", "blockchain"], True) + +mt.make_tree() + +print "root:", mt.get_merkle_root() # root: '765f15d171871b00034ee55e48ffdf76afbc44ed0bcff5c82f31351d333c2ed1' + +print mt.get_proof(1) # [{left: '2da7240f6c88536be72abe9f04e454c6478ee29709fc3729ddfb942f804fbf08'}, + # {right: 'ef7797e13d3a75526946a3bcf00daec9fc9c9c4d51ddc7cc5df888f74dd434d1'}] + +print mt.validate_proof(mt.get_proof(1), mt.get_leaf(1), mt.get_merkle_root()) # True +``` + +## Notes + +### About tree generation + +1. Internally, leaves are stored as `bytearray`. When the tree is build, it is generated by hashing together the `bytearray` values. +2. Lonely leaf nodes are promoted to the next level up, as depicted below. + + ROOT=Hash(H+E) + / \ + / \ + H=Hash(F+G) E + / \ \ + / \ \ + F=Hash(A+B) G=Hash(C+D) E + / \ / \ \ + / \ / \ \ + A B C D E + + +### Development +This module uses Python's `hashlib` for hashing. Inside a `MerkleTools` object all +hashes are stored as Python `bytearray`. This way hashes can be concatenated simply with `+` and the result +used as input for the hash function. But for +simplicity and easy to use `MerkleTools` methods expect that both input and outputs are hex +strings. We can convert from one type to the other using default Python string methods. 
+For example: +```python +hash = hashlib.sha256('a').digest() # '\xca\x97\x81\x12\xca\x1b\xbd\xca\xfa\xc21\xb3\x9a#\xdcM\xa7\x86\xef\xf8\x14|Nr\xb9\x80w\x85\xaf\xeeH\xbb' +hex_string = hash.decode('hex') # 'ca978112ca1bbdcafac231b39a23dc4da786eff8147c4e72b9807785afee48bb' +back_to_hash = hash_string.decode('hex') # '\xca\x97\x81\x12\xca\x1b\xbd\xca\xfa\xc21\xb3\x9a#\xdcM\xa7\x86\xef\xf8\x14|Nr\xb9\x80w\x85\xaf\xeeH\xbb' +``` diff --git a/src/lib/merkletools/__init__.py b/src/lib/merkletools/__init__.py new file mode 100644 index 000000000..ce5c94870 --- /dev/null +++ b/src/lib/merkletools/__init__.py @@ -0,0 +1,138 @@ +import hashlib +import binascii + +class MerkleTools(object): + def __init__(self, hash_type="sha256"): + hash_type = hash_type.lower() + if hash_type == 'sha256': + self.hash_function = hashlib.sha256 + elif hash_type == 'md5': + self.hash_function = hashlib.md5 + elif hash_type == 'sha224': + self.hash_function = hashlib.sha224 + elif hash_type == 'sha384': + self.hash_function = hashlib.sha384 + elif hash_type == 'sha512': + self.hash_function = hashlib.sha512 + elif hash_type == 'sha3_256': + self.hash_function = hashlib.sha3_256 + elif hash_type == 'sha3_224': + self.hash_function = hashlib.sha3_224 + elif hash_type == 'sha3_384': + self.hash_function = hashlib.sha3_384 + elif hash_type == 'sha3_512': + self.hash_function = hashlib.sha3_512 + else: + raise Exception('`hash_type` {} nor supported'.format(hash_type)) + + self.reset_tree() + + def _to_hex(self, x): + try: # python3 + return x.hex() + except: # python2 + return binascii.hexlify(x) + + def reset_tree(self): + self.leaves = list() + self.levels = None + self.is_ready = False + + def add_leaf(self, values, do_hash=False): + self.is_ready = False + # check if single leaf + if isinstance(values, tuple) or isinstance(values, list): + for v in values: + if do_hash: + v = v.encode('utf-8') + v = self.hash_function(v).hexdigest() + v = bytearray.fromhex(v) + else: + v = bytearray.fromhex(v) 
+ self.leaves.append(v) + else: + if do_hash: + v = values.encode("utf-8") + v = self.hash_function(v).hexdigest() + v = bytearray.fromhex(v) + else: + v = bytearray.fromhex(values) + self.leaves.append(v) + + def get_leaf(self, index): + return self._to_hex(self.leaves[index]) + + def get_leaf_count(self): + return len(self.leaves) + + def get_tree_ready_state(self): + return self.is_ready + + def _calculate_next_level(self): + solo_leave = None + N = len(self.levels[0]) # number of leaves on the level + if N % 2 == 1: # if odd number of leaves on the level + solo_leave = self.levels[0][-1] + N -= 1 + + new_level = [] + for l, r in zip(self.levels[0][0:N:2], self.levels[0][1:N:2]): + new_level.append(self.hash_function(l+r).digest()) + if solo_leave is not None: + new_level.append(solo_leave) + self.levels = [new_level, ] + self.levels # prepend new level + + def make_tree(self): + self.is_ready = False + if self.get_leaf_count() > 0: + self.levels = [self.leaves, ] + while len(self.levels[0]) > 1: + self._calculate_next_level() + self.is_ready = True + + def get_merkle_root(self): + if self.is_ready: + if self.levels is not None: + return self._to_hex(self.levels[0][0]) + else: + return None + else: + return None + + def get_proof(self, index): + if self.levels is None: + return None + elif not self.is_ready or index > len(self.leaves)-1 or index < 0: + return None + else: + proof = [] + for x in range(len(self.levels) - 1, 0, -1): + level_len = len(self.levels[x]) + if (index == level_len - 1) and (level_len % 2 == 1): # skip if this is an odd end node + index = int(index / 2.) + continue + is_right_node = index % 2 + sibling_index = index - 1 if is_right_node else index + 1 + sibling_pos = "left" if is_right_node else "right" + sibling_value = self._to_hex(self.levels[x][sibling_index]) + proof.append({sibling_pos: sibling_value}) + index = int(index / 2.) 
+ return proof + + def validate_proof(self, proof, target_hash, merkle_root): + merkle_root = bytearray.fromhex(merkle_root) + target_hash = bytearray.fromhex(target_hash) + if len(proof) == 0: + return target_hash == merkle_root + else: + proof_hash = target_hash + for p in proof: + try: + # the sibling is a left node + sibling = bytearray.fromhex(p['left']) + proof_hash = self.hash_function(sibling + proof_hash).digest() + except: + # the sibling is a right node + sibling = bytearray.fromhex(p['right']) + proof_hash = self.hash_function(proof_hash + sibling).digest() + return proof_hash == merkle_root diff --git a/src/lib/merkletools/setup.py b/src/lib/merkletools/setup.py new file mode 100644 index 000000000..51f2341bd --- /dev/null +++ b/src/lib/merkletools/setup.py @@ -0,0 +1,29 @@ +import os + +from setuptools import find_packages +from setuptools import setup + +here = os.path.abspath(os.path.dirname(__file__)) +install_requires = [ + "pysha3==1.0b1" +] + +setup( + name='merkletools', + version='1.0.2', + description='Merkle Tools', + classifiers=[ + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 2.7", + ], + url='https://github.com/', + author='Eder Santana', + keywords='merkle tree, blockchain, tierion', + license="MIT", + packages=find_packages(), + include_package_data=False, + zip_safe=False, + install_requires=install_requires +) diff --git a/src/lib/opensslVerify/HashInfo.txt b/src/lib/opensslVerify/HashInfo.txt index f5308e272..32739d559 100644 Binary files a/src/lib/opensslVerify/HashInfo.txt and b/src/lib/opensslVerify/HashInfo.txt differ diff --git a/src/lib/opensslVerify/OpenSSL License.txt b/src/lib/opensslVerify/OpenSSL License.txt index 972344590..3090896c8 100644 --- a/src/lib/opensslVerify/OpenSSL License.txt +++ b/src/lib/opensslVerify/OpenSSL License.txt @@ -12,7 +12,7 @@ --------------- /* 
==================================================================== - * Copyright (c) 1998-2011 The OpenSSL Project. All rights reserved. + * Copyright (c) 1998-2016 The OpenSSL Project. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions diff --git a/src/lib/opensslVerify/ReadMe.txt b/src/lib/opensslVerify/ReadMe.txt index 352ccef6c..d3acdd9da 100644 --- a/src/lib/opensslVerify/ReadMe.txt +++ b/src/lib/opensslVerify/ReadMe.txt @@ -1,59 +1,59 @@ -============================================================================= -OpenSSL v1.0.2a Precompiled Binaries for Win32 ------------------------------------------------------------------------------ - - *** Release Information *** - -Release Date: Mrz 20, 2015 - -Author: Frederik A. Winkelsdorf (opendec.wordpress.com) - for the Indy Project (www.indyproject.org) - -Requirements: Indy 10.5.5+ (SVN Version or Delphi 2009 and newer) - -Dependencies: The libraries have no noteworthy dependencies - -Installation: Copy both DLL files into your application directory - -Supported OS: Windows 2000 up to Windows 8 - ------------------------------------------------------------------------------ - - *** Legal Disclaimer *** - -THIS SOFTWARE IS PROVIDED BY ITS AUTHOR AND THE INDY PROJECT "AS IS" AND ANY -EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY -DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND -ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF -THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -OpenSSL license terms are provided in the file "OpenSSL License.txt". - -PLEASE CHECK IF YOU NEED TO COMPLY WITH EXPORT RESTRICTIONS FOR CRYPTOGRAPHIC -SOFTWARE AND/OR PATENTS. - ------------------------------------------------------------------------------ - - *** Build Information Win32 *** - -Built with: Microsoft Visual C++ 2008 Express Edition - The Netwide Assembler (NASM) v2.11.05 Win32 - Strawberry Perl v5.20.0.1 Win32 Portable - Windows PowerShell - FinalBuilder 7 Embarcadero Edition - -Commands: perl configure VC-WIN32 - ms\do_nasm - adjusted ms\ntdll.mak (replaced "/MD" with "/MT") - adjusted ms\version32.rc (Indy Information inserted) - nmake -f ms\ntdll.mak - nmake -f ms\ntdll.mak test - editbin.exe /rebase:base=0x11000000 libeay32.dll - editbin.exe /rebase:base=0x12000000 ssleay32.dll - +============================================================================= +OpenSSL v1.0.2l Precompiled Binaries for Win32 +----------------------------------------------------------------------------- + + *** Release Information *** + +Release Date: May 29, 2017 + +Author: Frederik A. 
Winkelsdorf (opendec.wordpress.com) + for the Indy Project (www.indyproject.org) + +Requirements: Indy 10.5.5+ (SVN Version or Delphi 2009 and newer) + +Dependencies: The libraries have no noteworthy dependencies + +Installation: Copy both DLL files into your application directory + +Supported OS: Windows 2000 up to Windows 10 + +----------------------------------------------------------------------------- + + *** Legal Disclaimer *** + +THIS SOFTWARE IS PROVIDED BY ITS AUTHOR AND THE INDY PROJECT "AS IS" AND ANY +EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY +DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +OpenSSL license terms are provided in the file "OpenSSL License.txt". + +PLEASE CHECK IF YOU NEED TO COMPLY WITH EXPORT RESTRICTIONS FOR CRYPTOGRAPHIC +SOFTWARE AND/OR PATENTS. 
+ +----------------------------------------------------------------------------- + + *** Build Information Win32 *** + +Built with: Microsoft Visual C++ 2008 Express Edition + The Netwide Assembler (NASM) v2.11.08 Win32 + Strawberry Perl v5.22.0.1 Win32 Portable + Windows PowerShell + FinalBuilder 7 + +Commands: perl configure VC-WIN32 + ms\do_nasm + adjusted ms\ntdll.mak (replaced "/MD" with "/MT") + adjusted ms\version32.rc (Indy Information inserted) + nmake -f ms\ntdll.mak + nmake -f ms\ntdll.mak test + editbin.exe /rebase:base=0x11000000 libeay32.dll + editbin.exe /rebase:base=0x12000000 ssleay32.dll + ============================================================================= \ No newline at end of file diff --git a/src/lib/opensslVerify/libeay32.dll b/src/lib/opensslVerify/libeay32.dll index 6359cc5a5..b66c6bbdc 100644 Binary files a/src/lib/opensslVerify/libeay32.dll and b/src/lib/opensslVerify/libeay32.dll differ diff --git a/src/lib/opensslVerify/license.txt b/src/lib/opensslVerify/license.txt new file mode 100644 index 000000000..fb03713dd --- /dev/null +++ b/src/lib/opensslVerify/license.txt @@ -0,0 +1,127 @@ + + LICENSE ISSUES + ============== + + The OpenSSL toolkit stays under a dual license, i.e. both the conditions of + the OpenSSL License and the original SSLeay license apply to the toolkit. + See below for the actual license texts. Actually both licenses are BSD-style + Open Source licenses. In case of any license issues related to OpenSSL + please contact openssl-core@openssl.org. + + OpenSSL License + --------------- + +/* ==================================================================== + * Copyright (c) 1998-2016 The OpenSSL Project. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. 
Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * 3. All advertising materials mentioning features or use of this + * software must display the following acknowledgment: + * "This product includes software developed by the OpenSSL Project + * for use in the OpenSSL Toolkit. (http://www.openssl.org/)" + * + * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to + * endorse or promote products derived from this software without + * prior written permission. For written permission, please contact + * openssl-core@openssl.org. + * + * 5. Products derived from this software may not be called "OpenSSL" + * nor may "OpenSSL" appear in their names without prior written + * permission of the OpenSSL Project. + * + * 6. Redistributions of any form whatsoever must retain the following + * acknowledgment: + * "This product includes software developed by the OpenSSL Project + * for use in the OpenSSL Toolkit (http://www.openssl.org/)" + * + * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY + * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR + * PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE OpenSSL PROJECT OR + * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) + * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED + * OF THE POSSIBILITY OF SUCH DAMAGE. + * ==================================================================== + * + * This product includes cryptographic software written by Eric Young + * (eay@cryptsoft.com). This product includes software written by Tim + * Hudson (tjh@cryptsoft.com). + * + */ + + Original SSLeay License + ----------------------- + +/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) + * All rights reserved. + * + * This package is an SSL implementation written + * by Eric Young (eay@cryptsoft.com). + * The implementation was written so as to conform with Netscapes SSL. + * + * This library is free for commercial and non-commercial use as long as + * the following conditions are aheared to. The following conditions + * apply to all code found in this distribution, be it the RC4, RSA, + * lhash, DES, etc., code; not just the SSL code. The SSL documentation + * included with this distribution is covered by the same copyright terms + * except that the holder is Tim Hudson (tjh@cryptsoft.com). + * + * Copyright remains Eric Young's, and as such any Copyright notices in + * the code are not to be removed. + * If this package is used in a product, Eric Young should be given attribution + * as the author of the parts of the library used. + * This can be in the form of a textual message at program startup or + * in documentation (online or textual) provided with the package. 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. All advertising materials mentioning features or use of this software + * must display the following acknowledgement: + * "This product includes cryptographic software written by + * Eric Young (eay@cryptsoft.com)" + * The word 'cryptographic' can be left out if the rouines from the library + * being used are not cryptographic related :-). + * 4. If you include any Windows specific code (or a derivative thereof) from + * the apps directory (application code) you must include an acknowledgement: + * "This product includes software written by Tim Hudson (tjh@cryptsoft.com)" + * + * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE + * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS + * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT + * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY + * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * + * The licence and distribution terms for any publically available version or + * derivative of this code cannot be changed. i.e. 
this code cannot simply be + * copied and put under another distribution licence + * [including the GNU Public Licence.] + */ + diff --git a/src/lib/opensslVerify/openssl.cnf b/src/lib/opensslVerify/openssl.cnf index a2c48ba17..611780084 100644 --- a/src/lib/opensslVerify/openssl.cnf +++ b/src/lib/opensslVerify/openssl.cnf @@ -1,5 +1,5 @@ [ req ] -prompt = no +prompt = yes default_bits = 2048 default_keyfile = server-key.pem distinguished_name = subject @@ -32,8 +32,8 @@ authorityKeyIdentifier = keyid,issuer basicConstraints = CA:FALSE keyUsage = digitalSignature, keyEncipherment +extendedKeyUsage = clientAuth, serverAuth subjectAltName = @alternate_names -nsComment = "OpenSSL Generated Certificate" # RFC 5280, Section 4.2.1.12 makes EKU optional # CA/Browser Baseline Requirements, Appendix (B)(3)(G) makes me confused @@ -46,8 +46,8 @@ subjectKeyIdentifier = hash basicConstraints = CA:FALSE keyUsage = digitalSignature, keyEncipherment +extendedKeyUsage = clientAuth, serverAuth subjectAltName = @alternate_names -nsComment = "OpenSSL Generated Certificate" # RFC 5280, Section 4.2.1.12 makes EKU optional # CA/Browser Baseline Requirements, Appendix (B)(3)(G) makes me confused @@ -55,16 +55,5 @@ nsComment = "OpenSSL Generated Certificate" [ alternate_names ] -DNS.1 = example.com -DNS.2 = www.example.com -DNS.3 = mail.example.com -DNS.4 = ftp.example.com - -# Add these if you need them. But usually you don't want them or -# need them in production. You may need them for development. 
-# DNS.5 = localhost -# DNS.6 = localhost.localdomain -# DNS.7 = 127.0.0.1 - -# IPv6 localhost -# DNS.8 = ::1 \ No newline at end of file +DNS.1 = $ENV::CN +DNS.2 = www.$ENV::CN diff --git a/src/lib/opensslVerify/openssl.exe b/src/lib/opensslVerify/openssl.exe index 1f5127e58..7bdac04f4 100644 Binary files a/src/lib/opensslVerify/openssl.exe and b/src/lib/opensslVerify/openssl.exe differ diff --git a/src/lib/opensslVerify/opensslVerify.py b/src/lib/opensslVerify/opensslVerify.py index 8103bea05..524aeb4d4 100644 --- a/src/lib/opensslVerify/opensslVerify.py +++ b/src/lib/opensslVerify/opensslVerify.py @@ -194,22 +194,15 @@ def __init__(self, library): def openLibrary(): global ssl - try: - if sys.platform.startswith("win"): - dll_path = "src/lib/opensslVerify/libeay32.dll" - elif sys.platform == "cygwin": - dll_path = "/bin/cygcrypto-1.0.0.dll" - elif os.path.isfile("../lib/libcrypto.so"): # ZeroBundle - dll_path = "../lib/libcrypto.so" - else: - dll_path = "/usr/local/ssl/lib/libcrypto.so" - ssl = _OpenSSL(dll_path) - assert ssl - except Exception, err: - ssl = _OpenSSL(ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or ctypes.util.find_library('libcrypto') or 'libeay32') + import util.SslPatch + ssl = _OpenSSL(util.SslPatch.getLibraryPath()) logging.debug("opensslVerify loaded: %s", ssl._lib) -openLibrary() +if __name__ == "__main__": + ssl = _OpenSSL(sys.argv[1]) +else: + openLibrary() + openssl_version = "%.9X" % ssl._lib.SSLeay() NID_secp256k1 = 714 @@ -329,7 +322,7 @@ def verify_message(address, signature, message): def SetCompactSignature(pkey, hash, signature): sig = base64.b64decode(signature) if len(sig) != 65: - raise BaseException("Wrong encoding") + raise Exception("Wrong encoding") nV = ord(sig[0]) if nV < 27 or nV >= 35: return False @@ -447,8 +440,8 @@ def test(): import time import os import sys - sys.path.append("..") - from pybitcointools import bitcoin as btctools + sys.path.append("../pybitcointools") + import bitcoin as 
btctools print "OpenSSL version %s" % openssl_version print ssl._lib priv = "5JsunC55XGVqFQj5kPGK4MWgTL26jKbnPhjnmchSNPo75XXCwtk" @@ -456,7 +449,7 @@ def test(): sign = btctools.ecdsa_sign("hello", priv) # HGbib2kv9gm9IJjDt1FXbXFczZi35u0rZR3iPUIt5GglDDCeIQ7v8eYXVNIaLoJRI4URGZrhwmsYQ9aVtRTnTfQ= s = time.time() - for i in range(100): + for i in range(1000): pubkey = getMessagePubkey("hello", sign) verified = btctools.pubkey_to_address(pubkey) == address - print "100x Verified", verified, time.time() - s + print "1000x Verified", verified, time.time() - s diff --git a/src/lib/opensslVerify/ssleay32.dll b/src/lib/opensslVerify/ssleay32.dll index b8b86115e..2ebf965dd 100644 Binary files a/src/lib/opensslVerify/ssleay32.dll and b/src/lib/opensslVerify/ssleay32.dll differ diff --git a/src/lib/pyasn1/CHANGES b/src/lib/pyasn1/CHANGES deleted file mode 100644 index 561dedd88..000000000 --- a/src/lib/pyasn1/CHANGES +++ /dev/null @@ -1,278 +0,0 @@ -Revision 0.1.7 --------------- - -- License updated to vanilla BSD 2-Clause to ease package use - (http://opensource.org/licenses/BSD-2-Clause). -- Test suite made discoverable by unittest/unittest2 discovery feature. -- Fix to decoder working on indefinite length substrate -- end-of-octets - marker is now detected by both tag and value. Otherwise zero values may - interfere with end-of-octets marker. -- Fix to decoder to fail in cases where tagFormat indicates inappropriate - format for the type (e.g. BOOLEAN is always PRIMITIVE, SET is always - CONSTRUCTED and OCTET STRING is either of the two) -- Fix to REAL type encoder to force primitive encoding form encoding. -- Fix to CHOICE decoder to handle explicitly tagged, indefinite length - mode encoding -- Fix to REAL type decoder to handle negative REAL values correctly. Test - case added. - -Revision 0.1.6 --------------- - -- The compact (valueless) way of encoding zero INTEGERs introduced in - 0.1.5 seems to fail miserably as the world is filled with broken - BER decoders. 
So we had to back off the *encoder* for a while. - There's still the IntegerEncoder.supportCompactZero flag which - enables compact encoding form whenever it evaluates to True. -- Report package version on debugging code initialization. - -Revision 0.1.5 --------------- - -- Documentation updated and split into chapters to better match - web-site contents. -- Make prettyPrint() working for non-initialized pyasn1 data objects. It - used to throw an exception. -- Fix to encoder to produce empty-payload INTEGER values for zeros -- Fix to decoder to support empty-payload INTEGER and REAL values -- Fix to unit test suites imports to be able to run each from - their current directory - -Revision 0.1.4 --------------- - -- Built-in codec debugging facility added -- Added some more checks to ObjectIdentifier BER encoder catching - posible 2^8 overflow condition by two leading sub-OIDs -- Implementations overriding the AbstractDecoder.valueDecoder method - changed to return the rest of substrate behind the item being processed - rather than the unprocessed substrate within the item (which is usually - empty). -- Decoder's recursiveFlag feature generalized as a user callback function - which is passed an uninitialized object recovered from substrate and - its uninterpreted payload. -- Catch inappropriate substrate type passed to decoder. -- Expose tagMap/typeMap/Decoder objects at DER decoder to uniform API. -- Obsolete __init__.MajorVersionId replaced with __init__.__version__ - which is now in-sync with distutils. -- Package classifiers updated. -- The __init__.py's made non-empty (rumors are that they may be optimized - out by package managers). -- Bail out gracefully whenever Python version is older than 2.4. -- Fix to Real codec exponent encoding (should be in 2's complement form), - some more test cases added. 
-- Fix in Boolean truth testing built-in methods -- Fix to substrate underrun error handling at ObjectIdentifier BER decoder -- Fix to BER Boolean decoder that allows other pre-computed - values besides 0 and 1 -- Fix to leading 0x80 octet handling in DER/CER/DER ObjectIdentifier decoder. - See http://www.cosic.esat.kuleuven.be/publications/article-1432.pdf - -Revision 0.1.3 --------------- - -- Include class name into asn1 value constraint violation exception. -- Fix to OctetString.prettyOut() method that looses leading zero when - building hex string. - -Revision 0.1.2 --------------- - -- Fix to __long__() to actually return longs on py2k -- Fix to OctetString.__str__() workings of a non-initialized object. -- Fix to quote initializer of OctetString.__repr__() -- Minor fix towards ObjectIdentifier.prettyIn() reliability -- ObjectIdentifier.__str__() is aliased to prettyPrint() -- Exlicit repr() calls replaced with '%r' - -Revision 0.1.1 --------------- - -- Hex/bin string initializer to OctetString object reworked - (in a backward-incompatible manner) -- Fixed float() infinity compatibility issue (affects 2.5 and earlier) -- Fixed a bug/typo at Boolean CER encoder. -- Major overhawl for Python 2.4 -- 3.2 compatibility: - + get rid of old-style types - + drop string module usage - + switch to rich comparation - + drop explicit long integer type use - + map()/filter() replaced with list comprehension - + apply() replaced with */**args - + switched to use 'key' sort() callback function - + support both __nonzero__() and __bool__() methods - + modified not to use py3k-incompatible exception syntax - + getslice() operator fully replaced with getitem() - + dictionary operations made 2K/3K compatible - + base type for encoding substrate and OctetString-based types - is now 'bytes' when running py3k and 'str' otherwise - + OctetString and derivatives now unicode compliant. 
- + OctetString now supports two python-neutral getters: asOcts() & asInts() - + print OctetString content in hex whenever it is not printable otherwise - + in test suite, implicit relative import replaced with the absolute one - + in test suite, string constants replaced with numerics - -Revision 0.0.13 ---------------- - -- Fix to base10 normalization function that loops on univ.Real(0) - -Revision 0.0.13b ----------------- - -- ASN.1 Real type is now supported properly. -- Objects of Constructed types now support __setitem__() -- Set/Sequence objects can now be addressed by their field names (string index) - and position (integer index). -- Typo fix to ber.SetDecoder code that prevented guided decoding operation. -- Fix to explicitly tagged items decoding support. -- Fix to OctetString.prettyPrint() to better handle non-printable content. -- Fix to repr() workings of Choice objects. - -Revision 0.0.13a ----------------- - -- Major codec re-design. -- Documentation significantly improved. -- ASN.1 Any type is now supported. -- All example ASN.1 modules moved to separate pyasn1-modules package. -- Fix to initial sub-OID overflow condition detection an encoder. -- BitString initialization value verification improved. -- The Set/Sequence.getNameByPosition() method implemented. -- Fix to proper behaviour of PermittedAlphabetConstraint object. -- Fix to improper Boolean substrate handling at CER/DER decoders. -- Changes towards performance improvement: - + all dict.has_key() & dict.get() invocations replaced with modern syntax - (this breaks compatibility with Python 2.1 and older). - + tag and tagset caches introduced to decoder - + decoder code improved to prevent unnecessary pyasn1 objects creation - + allow disabling components verification when setting components to - structured types, this is used by decoder whilst running in guided mode. - + BER decoder for integer values now looks up a small set of pre-computed - substrate values to save on decoding. 
- + a few pre-computed values configured to ObjectIdentifier BER encoder. - + ChoiceDecoder split-off SequenceOf one to save on unnecessary checks. - + replace slow hasattr()/getattr() calls with isinstance() introspection. - + track the number of initialized components of Constructed types to save - on default/optional components initialization. - + added a shortcut ObjectIdentifier.asTuple() to be used instead of - __getitem__() in hotspots. - + use Tag.asTuple() and pure integers at tag encoder. - + introduce and use in decoder the baseTagSet attribute of the built-in - ASN.1 types. - -Revision 0.0.12a ----------------- - -- The individual tag/length/value processing methods of - encoder.AbstractItemEncoder renamed (leading underscore stripped) - to promote overloading in cases where partial substrate processing - is required. -- The ocsp.py, ldap.py example scripts added. -- Fix to univ.ObjectIdentifier input value handler to disallow negative - sub-IDs. - -Revision 0.0.11a ----------------- - -- Decoder can now treat values of unknown types as opaque OctetString. -- Fix to Set/SetOf type decoder to handle uninitialized scalar SetOf - components correctly. - -Revision 0.0.10a ----------------- - -- API versioning mechanics retired (pyasn1.v1 -> pyasn1) what makes - it possible to zip-import pyasn1 sources (used by egg and py2exe). - -Revision 0.0.9a ---------------- - -- Allow any non-zero values in Boolean type BER decoder, as it's in - accordnance with the standard. - -Revision 0.0.8a ---------------- - -- Integer.__index__() now supported (for Python 2.5+). -- Fix to empty value encoding in BitString encoder, test case added. -- Fix to SequenceOf decoder that prevents it skipping possible Choice - typed inner component. -- Choice.getName() method added for getting currently set component - name. -- OctetsString.prettyPrint() does a single str() against its value - eliminating an extra quotes. 
- -Revision 0.0.7a ---------------- - -- Large tags (>31) now supported by codecs. -- Fix to encoder to properly handle explicitly tagged untagged items. -- All possible value lengths (up to 256^126) now supported by encoders. -- Fix to Tag class constructor to prevent negative IDs. - -Revision 0.0.6a ---------------- - -- Make use of setuptools. -- Constraints derivation verification (isSuperTypeOf()/isSubTypeOf()) fixed. -- Fix to constraints comparation logic -- can't cmp() hash values as it - may cause false positives due to hash conflicts. - -Revision 0.0.5a ---------------- - -- Integer BER codec reworked fixing negative values encoding bug. -- clone() and subtype() methods of Constructed ASN.1 classes now - accept optional cloneValueFlag flag which controls original value - inheritance. The default is *not* to inherit original value for - performance reasons (this may affect backward compatibility). - Performance penalty may be huge on deeply nested Constructed objects - re-creation. -- Base ASN.1 types (pyasn1.type.univ.*) do not have default values - anymore. They remain uninitialized acting as ASN.1 types. In - this model, initialized ASN.1 types represent either types with - default value installed or a type instance. -- Decoders' prototypes are now class instances rather than classes. - This is to simplify initial value installation to decoder's - prototype value. -- Bugfix to BitString BER decoder (trailing bits not regarded). -- Bugfix to Constraints use as mapping keys. -- Bugfix to Integer & BitString clone() methods -- Bugix to the way to distinguish Set from SetOf at CER/DER SetOfEncoder -- Adjustments to make it running on Python 1.5. -- In tests, substrate constants converted from hex escaped literals into - octals to overcome indefinite hex width issue occuring in young Python. 
-- Minor performance optimization of TagSet.isSuperTagSetOf() method -- examples/sshkey.py added - -Revision 0.0.4a ---------------- - -* Asn1ItemBase.prettyPrinter() -> *.prettyPrint() - -Revision 0.0.3a ---------------- - -* Simple ASN1 objects now hash to their Python value and don't - depend upon tag/constraints/etc. -* prettyIn & prettyOut methods of SimplleAsn1Object become public -* many syntax fixes - -Revision 0.0.2a ---------------- - -* ConstraintsIntersection.isSuperTypeOf() and - ConstraintsIntersection.hasConstraint() implemented -* Bugfix to NamedValues initialization code -* +/- operators added to NamedValues objects -* Integer.__abs__() & Integer.subtype() added -* ObjectIdentifier.prettyOut() fixes -* Allow subclass components at SequenceAndSetBase -* AbstractConstraint.__cmp__() dropped -* error.Asn1Error replaced with error.PyAsn1Error - -Revision 0.0.1a ---------------- - -* Initial public alpha release diff --git a/src/lib/pyasn1/LICENSE b/src/lib/pyasn1/LICENSE.rst similarity index 95% rename from src/lib/pyasn1/LICENSE rename to src/lib/pyasn1/LICENSE.rst index fac589b8c..02b45c430 100644 --- a/src/lib/pyasn1/LICENSE +++ b/src/lib/pyasn1/LICENSE.rst @@ -1,4 +1,4 @@ -Copyright (c) 2005-2013, Ilya Etingof +Copyright (c) 2005-2017, Ilya Etingof All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/src/lib/pyasn1/PKG-INFO b/src/lib/pyasn1/PKG-INFO deleted file mode 100644 index 5de78eceb..000000000 --- a/src/lib/pyasn1/PKG-INFO +++ /dev/null @@ -1,26 +0,0 @@ -Metadata-Version: 1.0 -Name: pyasn1 -Version: 0.1.7 -Summary: ASN.1 types and codecs -Home-page: http://sourceforge.net/projects/pyasn1/ -Author: Ilya Etingof -Author-email: ilya@glas.net -License: BSD -Description: A pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208). 
-Platform: any -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Console -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: Education -Classifier: Intended Audience :: Information Technology -Classifier: Intended Audience :: Science/Research -Classifier: Intended Audience :: System Administrators -Classifier: Intended Audience :: Telecommunications Industry -Classifier: License :: OSI Approved :: BSD License -Classifier: Natural Language :: English -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 3 -Classifier: Topic :: Communications -Classifier: Topic :: Security :: Cryptography -Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/src/lib/pyasn1/README b/src/lib/pyasn1/README deleted file mode 100644 index ffa3b57e5..000000000 --- a/src/lib/pyasn1/README +++ /dev/null @@ -1,68 +0,0 @@ - -ASN.1 library for Python ------------------------- - -This is an implementation of ASN.1 types and codecs in Python programming -language. It has been first written to support particular protocol (SNMP) -but then generalized to be suitable for a wide range of protocols -based on ASN.1 specification. - -FEATURES --------- - -* Generic implementation of ASN.1 types (X.208) -* Fully standard compliant BER/CER/DER codecs -* 100% Python, works with Python 2.4 up to Python 3.3 (beta 1) -* MT-safe - -MISFEATURES ------------ - -* No ASN.1 compiler (by-hand ASN.1 spec compilation into Python code required) -* Codecs are not restartable - -INSTALLATION ------------- - -The pyasn1 package uses setuptools/distutils for installation. 
Thus do -either: - -$ easy_install pyasn1 - -or - -$ tar zxf pyasn1-0.1.3.tar.gz -$ cd pyasn1-0.1.3 -$ python setup.py install -$ cd test -$ python suite.py # run unit tests - -OPERATION ---------- - -Perhaps a typical use would involve [by-hand] compilation of your ASN.1 -specification into pyasn1-backed Python code at your application. - -For more information on pyasn1 APIs, please, refer to the -doc/pyasn1-tutorial.html file in the distribution. - -Also refer to example modules. Take a look at pyasn1-modules package -- maybe -it already holds something useful to you. - -AVAILABILITY ------------- - -The pyasn1 package is distributed under terms and conditions of BSD-style -license. See LICENSE file in the distribution. Source code is freely -available from: - -http://pyasn1.sf.net - - -FEEDBACK --------- - -Please, send your comments and fixes to mailing lists at project web site. - -=-=-= -mailto: ilya@glas.net diff --git a/src/lib/pyasn1/THANKS b/src/lib/pyasn1/THANKS deleted file mode 100644 index 4de1713c0..000000000 --- a/src/lib/pyasn1/THANKS +++ /dev/null @@ -1,4 +0,0 @@ -Denis S. Otkidach -Gregory Golberg -Bud P. Bruegger -Jacek Konieczny diff --git a/src/lib/pyasn1/TODO b/src/lib/pyasn1/TODO deleted file mode 100644 index 0ee211c2a..000000000 --- a/src/lib/pyasn1/TODO +++ /dev/null @@ -1,36 +0,0 @@ -* Specialize ASN.1 character and useful types -* Come up with simpler API for deeply nested constructed objects - addressing - -ber.decoder: -* suspend codec on underrun error ? -* class-static components map (in simple type classes) -* present subtypes ? -* component presence check wont work at innertypeconst -* add the rest of ASN1 types/codecs -* type vs value, defaultValue - -ber.encoder: -* Asn1Item.clone() / shallowcopy issue -* large length encoder? 
-* codec restart -* preserve compatible API whenever stateful codec gets implemented -* restartable vs incremental -* plan: make a stateless univeral decoder, then convert it to restartable - then to incremental - -type.useful: -* may need to implement prettyIn/Out - -type.char: -* may need to implement constraints - -type.univ: -* simpler API to constructed objects: value init, recursive - -type.namedtypes -* type vs tagset name convention - -general: - -* how untagged TagSet should be initialized? diff --git a/src/lib/pyasn1/__init__.py b/src/lib/pyasn1/__init__.py index 88aff79c8..091f6c3c9 100644 --- a/src/lib/pyasn1/__init__.py +++ b/src/lib/pyasn1/__init__.py @@ -1,8 +1,8 @@ import sys # http://www.python.org/dev/peps/pep-0396/ -__version__ = '0.1.7' +__version__ = '0.2.4' if sys.version_info[:2] < (2, 4): - raise RuntimeError('PyASN1 requires Python 2.4 or later') + raise RuntimeError('PyASN1 requires Python 2.4 or later') diff --git a/src/lib/pyasn1/codec/ber/decoder.py b/src/lib/pyasn1/codec/ber/decoder.py index be0cf4907..e100e7503 100644 --- a/src/lib/pyasn1/codec/ber/decoder.py +++ b/src/lib/pyasn1/codec/ber/decoder.py @@ -1,56 +1,72 @@ -# BER decoder -from pyasn1.type import tag, base, univ, char, useful, tagmap +# +# This file is part of pyasn1 software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from pyasn1.type import base, tag, univ, char, useful, tagmap from pyasn1.codec.ber import eoo -from pyasn1.compat.octets import oct2int, octs2ints, isOctetsType +from pyasn1.compat.octets import oct2int, octs2ints, ints2octs, ensureString, null +from pyasn1.compat.integer import from_bytes from pyasn1 import debug, error -class AbstractDecoder: +__all__ = ['decode'] + + +class AbstractDecoder(object): protoComponent = None + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): raise error.PyAsn1Error('Decoder not implemented for %s' % (tagSet,)) def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, - length, state, decodeFun, substrateFun): + length, state, decodeFun, substrateFun): raise error.PyAsn1Error('Indefinite length mode decoder not implemented for %s' % (tagSet,)) + class AbstractSimpleDecoder(AbstractDecoder): tagFormats = (tag.tagFormatSimple,) + + @staticmethod + def substrateCollector(asn1Object, substrate, length): + return substrate[:length], substrate[length:] + def _createComponent(self, asn1Spec, tagSet, value=None): - if tagSet[0][1] not in self.tagFormats: - raise error.PyAsn1Error('Invalid tag format %r for %r' % (tagSet[0], self.protoComponent,)) + if tagSet[0].tagFormat not in self.tagFormats: + raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType())) if asn1Spec is None: return self.protoComponent.clone(value, tagSet) elif value is None: return asn1Spec else: return asn1Spec.clone(value) - + + class AbstractConstructedDecoder(AbstractDecoder): tagFormats = (tag.tagFormatConstructed,) + + # noinspection PyUnusedLocal def _createComponent(self, asn1Spec, tagSet, value=None): - if tagSet[0][1] not in self.tagFormats: - raise error.PyAsn1Error('Invalid tag format %r for %r' % (tagSet[0], self.protoComponent,)) + if 
tagSet[0].tagFormat not in self.tagFormats: + raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType())) if asn1Spec is None: return self.protoComponent.clone(tagSet) else: return asn1Spec.clone() - -class EndOfOctetsDecoder(AbstractSimpleDecoder): - def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, - length, state, decodeFun, substrateFun): - return eoo.endOfOctets, substrate[length:] + class ExplicitTagDecoder(AbstractSimpleDecoder): protoComponent = univ.Any('') tagFormats = (tag.tagFormatConstructed,) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): if substrateFun: return substrateFun( - self._createComponent(asn1Spec, tagSet, ''), - substrate, length - ) + self._createComponent(asn1Spec, tagSet, ''), + substrate, length + ) head, tail = substrate[:length], substrate[length:] value, _ = decodeFun(head, asn1Spec, tagSet, length) return value, tail @@ -59,177 +75,184 @@ def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): if substrateFun: return substrateFun( - self._createComponent(asn1Spec, tagSet, ''), - substrate, length - ) + self._createComponent(asn1Spec, tagSet, ''), + substrate, length + ) value, substrate = decodeFun(substrate, asn1Spec, tagSet, length) - terminator, substrate = decodeFun(substrate) - if eoo.endOfOctets.isSameTypeWith(terminator) and \ - terminator == eoo.endOfOctets: + terminator, substrate = decodeFun(substrate, allowEoo=True) + if terminator is eoo.endOfOctets: return value, substrate else: raise error.PyAsn1Error('Missing end-of-octets terminator') + explicitTagDecoder = ExplicitTagDecoder() + class IntegerDecoder(AbstractSimpleDecoder): protoComponent = univ.Integer(0) - precomputedValues = { - '\x00': 0, - '\x01': 1, - '\x02': 2, - '\x03': 3, - '\x04': 4, - '\x05': 5, - '\x06': 6, - '\x07': 7, - '\x08': 8, - '\x09': 9, - '\xff': -1, - '\xfe': 
-2, - '\xfd': -3, - '\xfc': -4, - '\xfb': -5 - } - + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): head, tail = substrate[:length], substrate[length:] + if not head: return self._createComponent(asn1Spec, tagSet, 0), tail - if head in self.precomputedValues: - value = self.precomputedValues[head] - else: - firstOctet = oct2int(head[0]) - if firstOctet & 0x80: - value = -1 - else: - value = 0 - for octet in head: - value = value << 8 | oct2int(octet) + + value = from_bytes(head, signed=True) + return self._createComponent(asn1Spec, tagSet, value), tail + class BooleanDecoder(IntegerDecoder): protoComponent = univ.Boolean(0) + def _createComponent(self, asn1Spec, tagSet, value=None): return IntegerDecoder._createComponent(self, asn1Spec, tagSet, value and 1 or 0) + class BitStringDecoder(AbstractSimpleDecoder): protoComponent = univ.BitString(()) tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed) + supportConstructedForm = True + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): head, tail = substrate[:length], substrate[length:] - if tagSet[0][1] == tag.tagFormatSimple: # XXX what tag to check? + if tagSet[0].tagFormat == tag.tagFormatSimple: # XXX what tag to check? 
if not head: raise error.PyAsn1Error('Empty substrate') trailingBits = oct2int(head[0]) if trailingBits > 7: raise error.PyAsn1Error( 'Trailing bits overflow %s' % trailingBits - ) + ) head = head[1:] - lsb = p = 0; l = len(head)-1; b = () - while p <= l: - if p == l: - lsb = trailingBits - j = 7 - o = oct2int(head[p]) - while j >= lsb: - b = b + ((o>>j)&0x01,) - j = j - 1 - p = p + 1 - return self._createComponent(asn1Spec, tagSet, b), tail - r = self._createComponent(asn1Spec, tagSet, ()) + value = self.protoComponent.fromOctetString(head, trailingBits) + return self._createComponent(asn1Spec, tagSet, value), tail + + if not self.supportConstructedForm: + raise error.PyAsn1Error('Constructed encoding form prohibited at %s' % self.__class__.__name__) + + bitString = self._createComponent(asn1Spec, tagSet) + if substrateFun: - return substrateFun(r, substrate, length) + return substrateFun(bitString, substrate, length) + while head: - component, head = decodeFun(head) - r = r + component - return r, tail + component, head = decodeFun(head, self.protoComponent) + bitString += component + + return bitString, tail def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): - r = self._createComponent(asn1Spec, tagSet, '') + bitString = self._createComponent(asn1Spec, tagSet) + if substrateFun: - return substrateFun(r, substrate, length) + return substrateFun(bitString, substrate, length) + while substrate: - component, substrate = decodeFun(substrate) - if eoo.endOfOctets.isSameTypeWith(component) and \ - component == eoo.endOfOctets: + component, substrate = decodeFun(substrate, self.protoComponent, allowEoo=True) + if component is eoo.endOfOctets: break - r = r + component + + bitString += component + else: - raise error.SubstrateUnderrunError( - 'No EOO seen before substrate ends' - ) - return r, substrate + raise error.SubstrateUnderrunError('No EOO seen before substrate ends') + + return bitString, substrate 
+ class OctetStringDecoder(AbstractSimpleDecoder): protoComponent = univ.OctetString('') tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed) + supportConstructedForm = True + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): head, tail = substrate[:length], substrate[length:] - if tagSet[0][1] == tag.tagFormatSimple: # XXX what tag to check? - return self._createComponent(asn1Spec, tagSet, head), tail - r = self._createComponent(asn1Spec, tagSet, '') + if substrateFun: - return substrateFun(r, substrate, length) + return substrateFun(self._createComponent(asn1Spec, tagSet), + substrate, length) + + if tagSet[0].tagFormat == tag.tagFormatSimple: # XXX what tag to check? + return self._createComponent(asn1Spec, tagSet, head), tail + + if not self.supportConstructedForm: + raise error.PyAsn1Error('Constructed encoding form prohibited at %s' % self.__class__.__name__) + + # All inner fragments are of the same type, treat them as octet string + substrateFun = self.substrateCollector + + header = null + while head: - component, head = decodeFun(head) - r = r + component - return r, tail + component, head = decodeFun(head, self.protoComponent, + substrateFun=substrateFun) + header += component + + return self._createComponent(asn1Spec, tagSet, header), tail def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): - r = self._createComponent(asn1Spec, tagSet, '') - if substrateFun: - return substrateFun(r, substrate, length) + if substrateFun and substrateFun is not self.substrateCollector: + asn1Object = self._createComponent(asn1Spec, tagSet) + return substrateFun(asn1Object, substrate, length) + + # All inner fragments are of the same type, treat them as octet string + substrateFun = self.substrateCollector + + header = null + while substrate: - component, substrate = decodeFun(substrate) - if eoo.endOfOctets.isSameTypeWith(component) and \ - 
component == eoo.endOfOctets: + component, substrate = decodeFun(substrate, + self.protoComponent, + substrateFun=substrateFun, + allowEoo=True) + if component is eoo.endOfOctets: break - r = r + component + header += component else: raise error.SubstrateUnderrunError( 'No EOO seen before substrate ends' - ) - return r, substrate + ) + return self._createComponent(asn1Spec, tagSet, header), substrate + class NullDecoder(AbstractSimpleDecoder): protoComponent = univ.Null('') + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): head, tail = substrate[:length], substrate[length:] - r = self._createComponent(asn1Spec, tagSet) + component = self._createComponent(asn1Spec, tagSet) if head: raise error.PyAsn1Error('Unexpected %d-octet substrate for Null' % length) - return r, tail + return component, tail + class ObjectIdentifierDecoder(AbstractSimpleDecoder): protoComponent = univ.ObjectIdentifier(()) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): head, tail = substrate[:length], substrate[length:] if not head: raise error.PyAsn1Error('Empty substrate') - # Get the first subid - subId = oct2int(head[0]) - oid = divmod(subId, 40) + head = octs2ints(head) - index = 1 + oid = () + index = 0 substrateLen = len(head) while index < substrateLen: - subId = oct2int(head[index]) - index = index + 1 - if subId == 128: - # ASN.1 spec forbids leading zeros (0x80) in sub-ID OID - # encoding, tolerating it opens a vulnerability. 
- # See http://www.cosic.esat.kuleuven.be/publications/article-1432.pdf page 7 - raise error.PyAsn1Error('Invalid leading 0x80 in sub-OID') + subId = head[index] + index += 1 + if subId < 128: + oid = oid + (subId,) elif subId > 128: # Construct subid from a number of octets nextSubId = subId @@ -239,44 +262,77 @@ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, if index >= substrateLen: raise error.SubstrateUnderrunError( 'Short substrate for sub-OID past %s' % (oid,) - ) - nextSubId = oct2int(head[index]) - index = index + 1 - subId = (subId << 7) + nextSubId - oid = oid + (subId,) + ) + nextSubId = head[index] + index += 1 + oid += ((subId << 7) + nextSubId,) + elif subId == 128: + # ASN.1 spec forbids leading zeros (0x80) in OID + # encoding, tolerating it opens a vulnerability. See + # http://www.cosic.esat.kuleuven.be/publications/article-1432.pdf + # page 7 + raise error.PyAsn1Error('Invalid octet 0x80 in OID encoding') + + # Decode two leading arcs + if 0 <= oid[0] <= 39: + oid = (0,) + oid + elif 40 <= oid[0] <= 79: + oid = (1, oid[0] - 40) + oid[1:] + elif oid[0] >= 80: + oid = (2, oid[0] - 80) + oid[1:] + else: + raise error.PyAsn1Error('Malformed first OID octet: %s' % head[0]) + return self._createComponent(asn1Spec, tagSet, oid), tail + class RealDecoder(AbstractSimpleDecoder): protoComponent = univ.Real() + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): head, tail = substrate[:length], substrate[length:] if not head: return self._createComponent(asn1Spec, tagSet, 0.0), tail - fo = oct2int(head[0]); head = head[1:] - if fo & 0x80: # binary enoding + fo = oct2int(head[0]) + head = head[1:] + if fo & 0x80: # binary encoding + if not head: + raise error.PyAsn1Error("Incomplete floating-point value") n = (fo & 0x03) + 1 if n == 4: n = oct2int(head[0]) + head = head[1:] eo, head = head[:n], head[n:] if not eo or not head: raise error.PyAsn1Error('Real exponent screwed') 
e = oct2int(eo[0]) & 0x80 and -1 or 0 - while eo: # exponent + while eo: # exponent e <<= 8 e |= oct2int(eo[0]) eo = eo[1:] + b = fo >> 4 & 0x03 # base bits + if b > 2: + raise error.PyAsn1Error('Illegal Real base') + if b == 1: # encbase = 8 + e *= 3 + elif b == 2: # encbase = 16 + e *= 4 p = 0 while head: # value p <<= 8 p |= oct2int(head[0]) head = head[1:] - if fo & 0x40: # sign bit + if fo & 0x40: # sign bit p = -p + sf = fo >> 2 & 0x03 # scale bits + p *= 2 ** sf value = (p, 2, e) elif fo & 0x40: # infinite value value = fo & 0x01 and '-inf' or 'inf' elif fo & 0xc0 == 0: # character encoding + if not head: + raise error.PyAsn1Error("Incomplete floating-point value") try: if fo & 0x3 == 0x1: # NR1 value = (int(head), 10, 0) @@ -287,180 +343,268 @@ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, else: raise error.SubstrateUnderrunError( 'Unknown NR (tag %s)' % fo - ) + ) except ValueError: raise error.SubstrateUnderrunError( 'Bad character Real syntax' - ) + ) else: raise error.SubstrateUnderrunError( 'Unknown encoding (tag %s)' % fo - ) + ) return self._createComponent(asn1Spec, tagSet, value), tail - -class SequenceDecoder(AbstractConstructedDecoder): - protoComponent = univ.Sequence() - def _getComponentTagMap(self, r, idx): - try: - return r.getComponentTagMapNearPosition(idx) - except error.PyAsn1Error: - return - def _getComponentPositionByType(self, r, t, idx): - return r.getComponentPositionNearType(t, idx) - + +class SequenceAndSetDecoderBase(AbstractConstructedDecoder): + protoComponent = None + orderedComponents = False + + def _getComponentTagMap(self, asn1Object, idx): + raise NotImplementedError() + + def _getComponentPositionByType(self, asn1Object, tagSet, idx): + raise NotImplementedError() + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): head, tail = substrate[:length], substrate[length:] - r = self._createComponent(asn1Spec, tagSet) - idx = 0 + asn1Object = 
self._createComponent(asn1Spec, tagSet) if substrateFun: - return substrateFun(r, substrate, length) - while head: - asn1Spec = self._getComponentTagMap(r, idx) - component, head = decodeFun(head, asn1Spec) - idx = self._getComponentPositionByType( - r, component.getEffectiveTagSet(), idx + return substrateFun(asn1Object, substrate, length) + + namedTypes = asn1Object.getComponentType() + + if not self.orderedComponents or not namedTypes or namedTypes.hasOptionalOrDefault: + seenIndices = set() + idx = 0 + while head: + asn1Spec = self._getComponentTagMap(asn1Object, idx) + component, head = decodeFun(head, asn1Spec) + idx = self._getComponentPositionByType( + asn1Object, component.effectiveTagSet, idx ) - r.setComponentByPosition(idx, component, asn1Spec is None) - idx = idx + 1 - r.setDefaultComponents() - r.verifySizeSpec() - return r, tail + + asn1Object.setComponentByPosition( + idx, component, + verifyConstraints=False, + matchTags=False, matchConstraints=False + ) + seenIndices.add(idx) + idx += 1 + + if namedTypes and not namedTypes.requiredComponents.issubset(seenIndices): + raise error.PyAsn1Error('ASN.1 object %s has uninitialized components' % asn1Object.__class__.__name__) + else: + for idx, asn1Spec in enumerate(namedTypes.values()): + component, head = decodeFun(head, asn1Spec) + asn1Object.setComponentByPosition( + idx, component, + verifyConstraints=False, + matchTags=False, matchConstraints=False + ) + + if not namedTypes: + asn1Object.verifySizeSpec() + + return asn1Object, tail def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): - r = self._createComponent(asn1Spec, tagSet) + asn1Object = self._createComponent(asn1Spec, tagSet) if substrateFun: - return substrateFun(r, substrate, length) - idx = 0 - while substrate: - asn1Spec = self._getComponentTagMap(r, idx) - component, substrate = decodeFun(substrate, asn1Spec) - if eoo.endOfOctets.isSameTypeWith(component) and \ - component == 
eoo.endOfOctets: - break - idx = self._getComponentPositionByType( - r, component.getEffectiveTagSet(), idx - ) - r.setComponentByPosition(idx, component, asn1Spec is None) - idx = idx + 1 + return substrateFun(asn1Object, substrate, length) + + namedTypes = asn1Object.getComponentType() + + if not namedTypes or namedTypes.hasOptionalOrDefault: + seenIndices = set() + idx = 0 + while substrate: + asn1Spec = self._getComponentTagMap(asn1Object, idx) + component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True) + if component is eoo.endOfOctets: + break + idx = self._getComponentPositionByType( + asn1Object, component.effectiveTagSet, idx + ) + + asn1Object.setComponentByPosition( + idx, component, + verifyConstraints=False, + matchTags=False, matchConstraints=False + ) + seenIndices.add(idx) + idx += 1 + + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + + if namedTypes and not namedTypes.requiredComponents.issubset(seenIndices): + raise error.PyAsn1Error('ASN.1 object %s has uninitialized components' % asn1Object.__class__.__name__) else: - raise error.SubstrateUnderrunError( - 'No EOO seen before substrate ends' + for idx, asn1Spec in enumerate(namedTypes.values()): + component, substrate = decodeFun(substrate, asn1Spec) + + asn1Object.setComponentByPosition( + idx, component, + verifyConstraints=False, + matchTags=False, matchConstraints=False ) - r.setDefaultComponents() - r.verifySizeSpec() - return r, substrate + + component, substrate = decodeFun(substrate, eoo.endOfOctets, allowEoo=True) + if component is not eoo.endOfOctets: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + + if not namedTypes: + asn1Object.verifySizeSpec() + + return asn1Object, substrate + +class SequenceDecoder(SequenceAndSetDecoderBase): + protoComponent = univ.Sequence() + orderedComponents = True + + def _getComponentTagMap(self, asn1Object, idx): + try: + return 
asn1Object.getComponentTagMapNearPosition(idx) + except error.PyAsn1Error: + return + + def _getComponentPositionByType(self, asn1Object, tagSet, idx): + return asn1Object.getComponentPositionNearType(tagSet, idx) + class SequenceOfDecoder(AbstractConstructedDecoder): - protoComponent = univ.SequenceOf() + protoComponent = univ.SequenceOf() + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): head, tail = substrate[:length], substrate[length:] - r = self._createComponent(asn1Spec, tagSet) + asn1Object = self._createComponent(asn1Spec, tagSet) if substrateFun: - return substrateFun(r, substrate, length) - asn1Spec = r.getComponentType() + return substrateFun(asn1Object, substrate, length) + asn1Spec = asn1Object.getComponentType() idx = 0 while head: component, head = decodeFun(head, asn1Spec) - r.setComponentByPosition(idx, component, asn1Spec is None) - idx = idx + 1 - r.verifySizeSpec() - return r, tail + asn1Object.setComponentByPosition( + idx, component, + verifyConstraints=False, + matchTags=False, matchConstraints=False + ) + idx += 1 + asn1Object.verifySizeSpec() + return asn1Object, tail def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): - r = self._createComponent(asn1Spec, tagSet) + asn1Object = self._createComponent(asn1Spec, tagSet) if substrateFun: - return substrateFun(r, substrate, length) - asn1Spec = r.getComponentType() + return substrateFun(asn1Object, substrate, length) + asn1Spec = asn1Object.getComponentType() idx = 0 while substrate: - component, substrate = decodeFun(substrate, asn1Spec) - if eoo.endOfOctets.isSameTypeWith(component) and \ - component == eoo.endOfOctets: + component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True) + if component is eoo.endOfOctets: break - r.setComponentByPosition(idx, component, asn1Spec is None) - idx = idx + 1 + asn1Object.setComponentByPosition( + idx, component, + 
verifyConstraints=False, + matchTags=False, matchConstraints=False + ) + idx += 1 else: raise error.SubstrateUnderrunError( 'No EOO seen before substrate ends' - ) - r.verifySizeSpec() - return r, substrate + ) + asn1Object.verifySizeSpec() + return asn1Object, substrate + -class SetDecoder(SequenceDecoder): +class SetDecoder(SequenceAndSetDecoderBase): protoComponent = univ.Set() - def _getComponentTagMap(self, r, idx): - return r.getComponentTagMap() + orderedComponents = False - def _getComponentPositionByType(self, r, t, idx): - nextIdx = r.getComponentPositionByType(t) + def _getComponentTagMap(self, asn1Object, idx): + return asn1Object.componentTagMap + + def _getComponentPositionByType(self, asn1Object, tagSet, idx): + nextIdx = asn1Object.getComponentPositionByType(tagSet) if nextIdx is None: return idx else: return nextIdx - + + class SetOfDecoder(SequenceOfDecoder): protoComponent = univ.SetOf() - + + class ChoiceDecoder(AbstractConstructedDecoder): protoComponent = univ.Choice() tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): head, tail = substrate[:length], substrate[length:] - r = self._createComponent(asn1Spec, tagSet) + asn1Object = self._createComponent(asn1Spec, tagSet) if substrateFun: - return substrateFun(r, substrate, length) - if r.getTagSet() == tagSet: # explicitly tagged Choice + return substrateFun(asn1Object, substrate, length) + if asn1Object.tagSet == tagSet: # explicitly tagged Choice component, head = decodeFun( - head, r.getComponentTagMap() - ) + head, asn1Object.componentTagMap + ) else: component, head = decodeFun( - head, r.getComponentTagMap(), tagSet, length, state - ) - if isinstance(component, univ.Choice): - effectiveTagSet = component.getEffectiveTagSet() - else: - effectiveTagSet = component.getTagSet() - r.setComponentByType(effectiveTagSet, component, 0, asn1Spec is None) - return r, tail + head, 
asn1Object.componentTagMap, tagSet, length, state + ) + effectiveTagSet = component.effectiveTagSet + asn1Object.setComponentByType( + effectiveTagSet, component, + verifyConstraints=False, + matchTags=False, matchConstraints=False, + innerFlag=False + ) + return asn1Object, tail def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, - length, state, decodeFun, substrateFun): - r = self._createComponent(asn1Spec, tagSet) + length, state, decodeFun, substrateFun): + asn1Object = self._createComponent(asn1Spec, tagSet) if substrateFun: - return substrateFun(r, substrate, length) - if r.getTagSet() == tagSet: # explicitly tagged Choice - component, substrate = decodeFun(substrate, r.getComponentTagMap()) - eooMarker, substrate = decodeFun(substrate) # eat up EOO marker - if not eoo.endOfOctets.isSameTypeWith(eooMarker) or \ - eooMarker != eoo.endOfOctets: + return substrateFun(asn1Object, substrate, length) + if asn1Object.tagSet == tagSet: # explicitly tagged Choice + component, substrate = decodeFun(substrate, asn1Object.componentTagMap) + # eat up EOO marker + eooMarker, substrate = decodeFun(substrate, allowEoo=True) + if eooMarker is not eoo.endOfOctets: raise error.PyAsn1Error('No EOO seen before substrate ends') else: - component, substrate= decodeFun( - substrate, r.getComponentTagMap(), tagSet, length, state + component, substrate = decodeFun( + substrate, asn1Object.componentTagMap, tagSet, length, state ) - if isinstance(component, univ.Choice): - effectiveTagSet = component.getEffectiveTagSet() - else: - effectiveTagSet = component.getTagSet() - r.setComponentByType(effectiveTagSet, component, 0, asn1Spec is None) - return r, substrate + effectiveTagSet = component.effectiveTagSet + asn1Object.setComponentByType( + effectiveTagSet, component, + verifyConstraints=False, + matchTags=False, matchConstraints=False, + innerFlag=False + ) + return asn1Object, substrate + class AnyDecoder(AbstractSimpleDecoder): protoComponent = univ.Any() 
tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): - if asn1Spec is None or \ - asn1Spec is not None and tagSet != asn1Spec.getTagSet(): + if asn1Spec is None or asn1Spec is not None and tagSet != asn1Spec.tagSet: # untagged Any container, recover inner header substrate - length = length + len(fullSubstrate) - len(substrate) + length += len(fullSubstrate) - len(substrate) substrate = fullSubstrate if substrateFun: return substrateFun(self._createComponent(asn1Spec, tagSet), @@ -470,64 +614,99 @@ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): - if asn1Spec is not None and tagSet == asn1Spec.getTagSet(): + if asn1Spec is not None and tagSet == asn1Spec.tagSet: # tagged Any type -- consume header substrate - header = '' + header = null else: # untagged Any, recover header substrate header = fullSubstrate[:-len(substrate)] - r = self._createComponent(asn1Spec, tagSet, header) - # Any components do not inherit initial tag asn1Spec = self.protoComponent - - if substrateFun: - return substrateFun(r, substrate, length) + + if substrateFun and substrateFun is not self.substrateCollector: + asn1Object = self._createComponent(asn1Spec, tagSet) + return substrateFun(asn1Object, header + substrate, length + len(header)) + + # All inner fragments are of the same type, treat them as octet string + substrateFun = self.substrateCollector + while substrate: - component, substrate = decodeFun(substrate, asn1Spec) - if eoo.endOfOctets.isSameTypeWith(component) and \ - component == eoo.endOfOctets: + component, substrate = decodeFun(substrate, asn1Spec, + substrateFun=substrateFun, + allowEoo=True) + if component is eoo.endOfOctets: break - r = r + component + header += component else: raise error.SubstrateUnderrunError( 'No EOO seen 
before substrate ends' - ) - return r, substrate + ) + if substrateFun: + return header, substrate + else: + return self._createComponent(asn1Spec, tagSet, header), substrate + # character string types class UTF8StringDecoder(OctetStringDecoder): protoComponent = char.UTF8String() + + class NumericStringDecoder(OctetStringDecoder): protoComponent = char.NumericString() + + class PrintableStringDecoder(OctetStringDecoder): protoComponent = char.PrintableString() + + class TeletexStringDecoder(OctetStringDecoder): protoComponent = char.TeletexString() + + class VideotexStringDecoder(OctetStringDecoder): protoComponent = char.VideotexString() + + class IA5StringDecoder(OctetStringDecoder): protoComponent = char.IA5String() + + class GraphicStringDecoder(OctetStringDecoder): protoComponent = char.GraphicString() + + class VisibleStringDecoder(OctetStringDecoder): protoComponent = char.VisibleString() + + class GeneralStringDecoder(OctetStringDecoder): protoComponent = char.GeneralString() + + class UniversalStringDecoder(OctetStringDecoder): protoComponent = char.UniversalString() + + class BMPStringDecoder(OctetStringDecoder): protoComponent = char.BMPString() + # "useful" types +class ObjectDescriptorDecoder(OctetStringDecoder): + protoComponent = useful.ObjectDescriptor() + + class GeneralizedTimeDecoder(OctetStringDecoder): protoComponent = useful.GeneralizedTime() + + class UTCTimeDecoder(OctetStringDecoder): protoComponent = useful.UTCTime() + tagMap = { - eoo.endOfOctets.tagSet: EndOfOctetsDecoder(), univ.Integer.tagSet: IntegerDecoder(), univ.Boolean.tagSet: BooleanDecoder(), univ.BitString.tagSet: BitStringDecoder(), @@ -537,8 +716,8 @@ class UTCTimeDecoder(OctetStringDecoder): univ.Enumerated.tagSet: IntegerDecoder(), univ.Real.tagSet: RealDecoder(), univ.Sequence.tagSet: SequenceDecoder(), # conflicts with SequenceOf - univ.Set.tagSet: SetDecoder(), # conflicts with SetOf - univ.Choice.tagSet: ChoiceDecoder(), # conflicts with Any + univ.Set.tagSet: 
SetDecoder(), # conflicts with SetOf + univ.Choice.tagSet: ChoiceDecoder(), # conflicts with Any # character string types char.UTF8String.tagSet: UTF8StringDecoder(), char.NumericString.tagSet: NumericStringDecoder(), @@ -552,9 +731,10 @@ class UTCTimeDecoder(OctetStringDecoder): char.UniversalString.tagSet: UniversalStringDecoder(), char.BMPString.tagSet: BMPStringDecoder(), # useful types + useful.ObjectDescriptor.tagSet: ObjectDescriptorDecoder(), useful.GeneralizedTime.tagSet: GeneralizedTimeDecoder(), useful.UTCTime.tagSet: UTCTimeDecoder() - } +} # Type-to-codec map for ambiguous ASN.1 types typeMap = { @@ -564,114 +744,146 @@ class UTCTimeDecoder(OctetStringDecoder): univ.SequenceOf.typeId: SequenceOfDecoder(), univ.Choice.typeId: ChoiceDecoder(), univ.Any.typeId: AnyDecoder() - } +} + +# Put in non-ambiguous types for faster codec lookup +for typeDecoder in tagMap.values(): + typeId = typeDecoder.protoComponent.__class__.typeId + if typeId is not None and typeId not in typeMap: + typeMap[typeId] = typeDecoder -( stDecodeTag, stDecodeLength, stGetValueDecoder, stGetValueDecoderByAsn1Spec, - stGetValueDecoderByTag, stTryAsExplicitTag, stDecodeValue, - stDumpRawValue, stErrorCondition, stStop ) = [x for x in range(10)] -class Decoder: +(stDecodeTag, stDecodeLength, stGetValueDecoder, stGetValueDecoderByAsn1Spec, + stGetValueDecoderByTag, stTryAsExplicitTag, stDecodeValue, + stDumpRawValue, stErrorCondition, stStop) = [x for x in range(10)] + + +class Decoder(object): defaultErrorState = stErrorCondition -# defaultErrorState = stDumpRawValue + # defaultErrorState = stDumpRawValue defaultRawDecoder = AnyDecoder() + supportIndefLength = True + + # noinspection PyDefaultArgument def __init__(self, tagMap, typeMap={}): self.__tagMap = tagMap self.__typeMap = typeMap - self.__endOfOctetsTagSet = eoo.endOfOctets.getTagSet() # Tag & TagSet objects caches self.__tagCache = {} self.__tagSetCache = {} - + self.__eooSentinel = ints2octs((0, 0)) + def __call__(self, 
substrate, asn1Spec=None, tagSet=None, - length=None, state=stDecodeTag, recursiveFlag=1, - substrateFun=None): - if debug.logger & debug.flagDecoder: + length=None, state=stDecodeTag, recursiveFlag=True, + substrateFun=None, allowEoo=False): + if debug.logger and debug.logger & debug.flagDecoder: debug.logger('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate))) + + substrate = ensureString(substrate) + + # Look for end-of-octets sentinel + if allowEoo and self.supportIndefLength: + if substrate.startswith(self.__eooSentinel): + debug.logger and debug.logger & debug.flagDecoder and debug.logger('end-of-octets sentinel found') + return eoo.endOfOctets, substrate[2:] + + value = base.noValue + fullSubstrate = substrate while state != stStop: if state == stDecodeTag: - # Decode tag if not substrate: raise error.SubstrateUnderrunError( 'Short octet stream on tag decoding' - ) - if not isOctetsType(substrate) and \ - not isinstance(substrate, univ.OctetString): - raise error.PyAsn1Error('Bad octet stream type') - + ) + # Decode tag + isShortTag = True firstOctet = substrate[0] substrate = substrate[1:] - if firstOctet in self.__tagCache: + try: lastTag = self.__tagCache[firstOctet] - else: - t = oct2int(firstOctet) - tagClass = t&0xC0 - tagFormat = t&0x20 - tagId = t&0x1F + except KeyError: + integerTag = oct2int(firstOctet) + tagClass = integerTag & 0xC0 + tagFormat = integerTag & 0x20 + tagId = integerTag & 0x1F if tagId == 0x1F: + isShortTag = False + lengthOctetIdx = 0 tagId = 0 - while 1: - if not substrate: - raise error.SubstrateUnderrunError( - 'Short octet stream on long tag decoding' - ) - t = oct2int(substrate[0]) - tagId = tagId << 7 | (t&0x7F) - substrate = substrate[1:] - if not t&0x80: - break + try: + while True: + integerTag = oct2int(substrate[lengthOctetIdx]) + lengthOctetIdx += 1 + tagId <<= 7 + tagId |= (integerTag & 0x7F) + if not integerTag & 
0x80: + break + substrate = substrate[lengthOctetIdx:] + except IndexError: + raise error.SubstrateUnderrunError( + 'Short octet stream on long tag decoding' + ) lastTag = tag.Tag( tagClass=tagClass, tagFormat=tagFormat, tagId=tagId - ) - if tagId < 31: + ) + if isShortTag: # cache short tags self.__tagCache[firstOctet] = lastTag if tagSet is None: - if firstOctet in self.__tagSetCache: - tagSet = self.__tagSetCache[firstOctet] + if isShortTag: + try: + tagSet = self.__tagSetCache[firstOctet] + except KeyError: + # base tag not recovered + tagSet = tag.TagSet((), lastTag) + self.__tagSetCache[firstOctet] = tagSet else: - # base tag not recovered tagSet = tag.TagSet((), lastTag) - if firstOctet in self.__tagCache: - self.__tagSetCache[firstOctet] = tagSet else: tagSet = lastTag + tagSet state = stDecodeLength - debug.logger and debug.logger & debug.flagDecoder and debug.logger('tag decoded into %r, decoding length' % tagSet) + debug.logger and debug.logger & debug.flagDecoder and debug.logger( + 'tag decoded into %s, decoding length' % tagSet) if state == stDecodeLength: # Decode length if not substrate: - raise error.SubstrateUnderrunError( - 'Short octet stream on length decoding' - ) - firstOctet = oct2int(substrate[0]) - if firstOctet == 128: + raise error.SubstrateUnderrunError( + 'Short octet stream on length decoding' + ) + firstOctet = oct2int(substrate[0]) + if firstOctet < 128: + size = 1 + length = firstOctet + elif firstOctet == 128: size = 1 length = -1 - elif firstOctet < 128: - length, size = firstOctet, 1 else: size = firstOctet & 0x7F # encoded in size bytes - length = 0 - lengthString = substrate[1:size+1] + encodedLength = octs2ints(substrate[1:size + 1]) # missing check on maximum size, which shouldn't be a # problem, we can handle more than is possible - if len(lengthString) != size: + if len(encodedLength) != size: raise error.SubstrateUnderrunError( - '%s<%s at %s' % - (size, len(lengthString), tagSet) - ) - for char in lengthString: - length 
= (length << 8) | oct2int(char) - size = size + 1 - substrate = substrate[size:] - if length != -1 and len(substrate) < length: - raise error.SubstrateUnderrunError( - '%d-octet short' % (length - len(substrate)) + '%s<%s at %s' % (size, len(encodedLength), tagSet) ) + length = 0 + for lengthOctet in encodedLength: + length <<= 8 + length |= lengthOctet + size += 1 + substrate = substrate[size:] + if length == -1: + if not self.supportIndefLength: + raise error.PyAsn1Error('Indefinite length encoding not supported by this codec') + else: + if len(substrate) < length: + raise error.SubstrateUnderrunError('%d-octet short' % (length - len(substrate))) state = stGetValueDecoder - debug.logger and debug.logger & debug.flagDecoder and debug.logger('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length]))) + debug.logger and debug.logger & debug.flagDecoder and debug.logger( + 'value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length])) + ) if state == stGetValueDecoder: if asn1Spec is None: state = stGetValueDecoderByTag @@ -692,19 +904,18 @@ def __call__(self, substrate, asn1Spec=None, tagSet=None, # in an incremental, tag-by-tag fashion (this is the case of # EXPLICIT tag which is most basic). Outermost tag comes first # from the wire. 
- # + # if state == stGetValueDecoderByTag: - if tagSet in self.__tagMap: + try: concreteDecoder = self.__tagMap[tagSet] - else: + except KeyError: concreteDecoder = None if concreteDecoder: state = stDecodeValue else: - _k = tagSet[:1] - if _k in self.__tagMap: - concreteDecoder = self.__tagMap[_k] - else: + try: + concreteDecoder = self.__tagMap[tagSet[:1]] + except KeyError: concreteDecoder = None if concreteDecoder: state = stDecodeValue @@ -712,96 +923,125 @@ def __call__(self, substrate, asn1Spec=None, tagSet=None, state = stTryAsExplicitTag if debug.logger and debug.logger & debug.flagDecoder: debug.logger('codec %s chosen by a built-in type, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "", state == stDecodeValue and 'value' or 'as explicit tag')) - debug.scope.push(concreteDecoder is None and '?' or concreteDecoder.protoComponent.__class__.__name__) + debug.scope.push( + concreteDecoder is None and '?' or concreteDecoder.protoComponent.__class__.__name__) if state == stGetValueDecoderByAsn1Spec: - if isinstance(asn1Spec, (dict, tagmap.TagMap)): - if tagSet in asn1Spec: - __chosenSpec = asn1Spec[tagSet] - else: - __chosenSpec = None + if asn1Spec.__class__ is dict or asn1Spec.__class__ is tagmap.TagMap: + try: + chosenSpec = asn1Spec[tagSet] + except KeyError: + chosenSpec = None if debug.logger and debug.logger & debug.flagDecoder: debug.logger('candidate ASN.1 spec is a map of:') - for t, v in asn1Spec.getPosMap().items(): - debug.logger(' %r -> %s' % (t, v.__class__.__name__)) - if asn1Spec.getNegMap(): + for firstOctet, v in asn1Spec.presentTypes.items(): + debug.logger(' %s -> %s' % (firstOctet, v.__class__.__name__)) + if asn1Spec.skipTypes: debug.logger('but neither of: ') - for i in asn1Spec.getNegMap().items(): - debug.logger(' %r -> %s' % (t, v.__class__.__name__)) - debug.logger('new candidate ASN.1 spec is %s, chosen by %r' % (__chosenSpec is None and '' or __chosenSpec.__class__.__name__, tagSet)) + for firstOctet, v 
in asn1Spec.skipTypes.items(): + debug.logger(' %s -> %s' % (firstOctet, v.__class__.__name__)) + debug.logger('new candidate ASN.1 spec is %s, chosen by %s' % (chosenSpec is None and '' or chosenSpec.prettyPrintType(), tagSet)) else: - __chosenSpec = asn1Spec - debug.logger and debug.logger & debug.flagDecoder and debug.logger('candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__) - if __chosenSpec is not None and ( - tagSet == __chosenSpec.getTagSet() or \ - tagSet in __chosenSpec.getTagMap() - ): - # use base type for codec lookup to recover untagged types - baseTagSet = __chosenSpec.baseTagSet - if __chosenSpec.typeId is not None and \ - __chosenSpec.typeId in self.__typeMap: - # ambiguous type - concreteDecoder = self.__typeMap[__chosenSpec.typeId] - debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen for an ambiguous type by type ID %s' % (__chosenSpec.typeId,)) - elif baseTagSet in self.__tagMap: - # base type or tagged subtype - concreteDecoder = self.__tagMap[baseTagSet] - debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen by base %r' % (baseTagSet,)) + if tagSet == asn1Spec.tagSet or tagSet in asn1Spec.tagMap: + chosenSpec = asn1Spec + debug.logger and debug.logger & debug.flagDecoder and debug.logger( + 'candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__) else: - concreteDecoder = None + chosenSpec = None + + if chosenSpec is not None: + try: + # ambiguous type or just faster codec lookup + concreteDecoder = self.__typeMap[chosenSpec.typeId] + debug.logger and debug.logger & debug.flagDecoder and debug.logger( + 'value decoder chosen for an ambiguous type by type ID %s' % (chosenSpec.typeId,)) + except KeyError: + # use base type for codec lookup to recover untagged types + baseTagSet = tag.TagSet(chosenSpec.tagSet.baseTag, chosenSpec.tagSet.baseTag) + try: + # base type or tagged subtype + concreteDecoder = self.__tagMap[baseTagSet] + debug.logger and debug.logger & 
debug.flagDecoder and debug.logger( + 'value decoder chosen by base %s' % (baseTagSet,)) + except KeyError: + concreteDecoder = None if concreteDecoder: - asn1Spec = __chosenSpec + asn1Spec = chosenSpec state = stDecodeValue else: state = stTryAsExplicitTag - elif tagSet == self.__endOfOctetsTagSet: - concreteDecoder = self.__tagMap[tagSet] - state = stDecodeValue - debug.logger and debug.logger & debug.flagDecoder and debug.logger('end-of-octets found') else: concreteDecoder = None state = stTryAsExplicitTag if debug.logger and debug.logger & debug.flagDecoder: debug.logger('codec %s chosen by ASN.1 spec, decoding %s' % (state == stDecodeValue and concreteDecoder.__class__.__name__ or "", state == stDecodeValue and 'value' or 'as explicit tag')) - debug.scope.push(__chosenSpec is None and '?' or __chosenSpec.__class__.__name__) + debug.scope.push(chosenSpec is None and '?' or chosenSpec.__class__.__name__) if state == stTryAsExplicitTag: - if tagSet and \ - tagSet[0][1] == tag.tagFormatConstructed and \ - tagSet[0][0] != tag.tagClassUniversal: + if tagSet and tagSet[0].tagFormat == tag.tagFormatConstructed and tagSet[0].tagClass != tag.tagClassUniversal: # Assume explicit tagging concreteDecoder = explicitTagDecoder state = stDecodeValue - else: + else: concreteDecoder = None state = self.defaultErrorState debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s chosen, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "", state == stDecodeValue and 'value' or 'as failure')) if state == stDumpRawValue: concreteDecoder = self.defaultRawDecoder - debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s chosen, decoding value' % concreteDecoder.__class__.__name__) + debug.logger and debug.logger & debug.flagDecoder and debug.logger( + 'codec %s chosen, decoding value' % concreteDecoder.__class__.__name__) state = stDecodeValue if state == stDecodeValue: - if recursiveFlag == 0 and not substrateFun: # 
legacy - substrateFun = lambda a,b,c: (a,b[:c]) + if not recursiveFlag and not substrateFun: # legacy + def substrateFun(a, b, c): + return a, b[:c] if length == -1: # indef length value, substrate = concreteDecoder.indefLenValueDecoder( fullSubstrate, substrate, asn1Spec, tagSet, length, stGetValueDecoder, self, substrateFun - ) + ) else: value, substrate = concreteDecoder.valueDecoder( fullSubstrate, substrate, asn1Spec, tagSet, length, stGetValueDecoder, self, substrateFun - ) + ) state = stStop - debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, value.prettyPrint(), substrate and debug.hexdump(substrate) or '')) + debug.logger and debug.logger & debug.flagDecoder and debug.logger( + 'codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, value.prettyPrint(), substrate and debug.hexdump(substrate) or '')) if state == stErrorCondition: raise error.PyAsn1Error( - '%r not in asn1Spec: %r' % (tagSet, asn1Spec) - ) + '%s not in asn1Spec: %s' % (tagSet, asn1Spec) + ) if debug.logger and debug.logger & debug.flagDecoder: debug.scope.pop() debug.logger('decoder left scope %s, call completed' % debug.scope) return value, substrate - + + +#: Turns BER octet stream into an ASN.1 object. +#: +#: Takes BER octetstream and decode it into an ASN.1 object +#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which +#: may be a scalar or an arbitrary nested structure. +#: +#: Parameters +#: ---------- +#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) +#: BER octetstream +#: +#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative +#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure +#: being decoded, *asn1Spec* may or may not be required. 
Most common reason for +#: it to require is that ASN.1 structure is encoded in *IMPLICIT* tagging mode. +#: +#: Returns +#: ------- +#: : :py:class:`tuple` +#: A tuple of pyasn1 object recovered from BER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: and the unprocessed trailing portion of the *substrate* (may be empty) +#: +#: Raises +#: ------ +#: : :py:class:`pyasn1.error.PyAsn1Error` +#: On decoding errors decode = Decoder(tagMap, typeMap) # XXX diff --git a/src/lib/pyasn1/codec/ber/encoder.py b/src/lib/pyasn1/codec/ber/encoder.py index 173949d0b..2bf2bc7f1 100644 --- a/src/lib/pyasn1/codec/ber/encoder.py +++ b/src/lib/pyasn1/codec/ber/encoder.py @@ -1,229 +1,319 @@ -# BER encoder +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# from pyasn1.type import base, tag, univ, char, useful from pyasn1.codec.ber import eoo from pyasn1.compat.octets import int2oct, oct2int, ints2octs, null, str2octs +from pyasn1.compat.integer import to_bytes from pyasn1 import debug, error -class Error(Exception): pass +__all__ = ['encode'] -class AbstractItemEncoder: + +class AbstractItemEncoder(object): supportIndefLenMode = 1 - def encodeTag(self, t, isConstructed): - tagClass, tagFormat, tagId = t.asTuple() # this is a hotspot - v = tagClass | tagFormat + + # noinspection PyMethodMayBeStatic + def encodeTag(self, singleTag, isConstructed): + tagClass, tagFormat, tagId = singleTag + encodedTag = tagClass | tagFormat if isConstructed: - v = v|tag.tagFormatConstructed + encodedTag |= tag.tagFormatConstructed if tagId < 31: - return int2oct(v|tagId) + return (encodedTag | tagId,) else: - s = int2oct(tagId&0x7f) - tagId = tagId >> 7 + substrate = (tagId & 0x7f,) + tagId >>= 7 while tagId: - s = int2oct(0x80|(tagId&0x7f)) + s - tagId = tagId >> 7 - return int2oct(v|0x1F) + s + substrate = (0x80 | (tagId & 0x7f),) + substrate + tagId >>= 7 + return (encodedTag | 0x1F,) + substrate 
def encodeLength(self, length, defMode): if not defMode and self.supportIndefLenMode: - return int2oct(0x80) + return (0x80,) if length < 0x80: - return int2oct(length) + return (length,) else: - substrate = null + substrate = () while length: - substrate = int2oct(length&0xff) + substrate - length = length >> 8 + substrate = (length & 0xff,) + substrate + length >>= 8 substrateLen = len(substrate) if substrateLen > 126: - raise Error('Length octets overflow (%d)' % substrateLen) - return int2oct(0x80 | substrateLen) + substrate + raise error.PyAsn1Error('Length octets overflow (%d)' % substrateLen) + return (0x80 | substrateLen,) + substrate def encodeValue(self, encodeFun, value, defMode, maxChunkSize): - raise Error('Not implemented') + raise error.PyAsn1Error('Not implemented') def _encodeEndOfOctets(self, encodeFun, defMode): if defMode or not self.supportIndefLenMode: return null else: return encodeFun(eoo.endOfOctets, defMode) - + def encode(self, encodeFun, value, defMode, maxChunkSize): - substrate, isConstructed = self.encodeValue( + substrate, isConstructed, isOctets = self.encodeValue( encodeFun, value, defMode, maxChunkSize - ) - tagSet = value.getTagSet() + ) + tagSet = value.tagSet + # tagged value? 
if tagSet: if not isConstructed: # primitive form implies definite mode - defMode = 1 - return self.encodeTag( - tagSet[-1], isConstructed - ) + self.encodeLength( - len(substrate), defMode - ) + substrate + self._encodeEndOfOctets(encodeFun, defMode) - else: - return substrate # untagged value + defMode = True + header = self.encodeTag(tagSet[-1], isConstructed) + header += self.encodeLength(len(substrate), defMode) + + if isOctets: + substrate = ints2octs(header) + substrate + else: + substrate = ints2octs(header + substrate) + + eoo = self._encodeEndOfOctets(encodeFun, defMode) + if eoo: + substrate += eoo + + return substrate + class EndOfOctetsEncoder(AbstractItemEncoder): def encodeValue(self, encodeFun, value, defMode, maxChunkSize): - return null, 0 + return null, False, True + class ExplicitlyTaggedItemEncoder(AbstractItemEncoder): def encodeValue(self, encodeFun, value, defMode, maxChunkSize): if isinstance(value, base.AbstractConstructedAsn1Item): - value = value.clone(tagSet=value.getTagSet()[:-1], - cloneValueFlag=1) + value = value.clone(tagSet=value.tagSet[:-1], cloneValueFlag=1) else: - value = value.clone(tagSet=value.getTagSet()[:-1]) - return encodeFun(value, defMode, maxChunkSize), 1 + value = value.clone(tagSet=value.tagSet[:-1]) + return encodeFun(value, defMode, maxChunkSize), True, True + explicitlyTaggedItemEncoder = ExplicitlyTaggedItemEncoder() + class BooleanEncoder(AbstractItemEncoder): - supportIndefLenMode = 0 - _true = ints2octs((1,)) - _false = ints2octs((0,)) + supportIndefLenMode = False + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): - return value and self._true or self._false, 0 + return value and (1,) or (0,), False, False + class IntegerEncoder(AbstractItemEncoder): - supportIndefLenMode = 0 + supportIndefLenMode = False supportCompactZero = False + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): - if value == 0: # shortcut for zero value + if value == 0: + # de-facto way to encode zero if 
self.supportCompactZero: - # this seems to be a correct way for encoding zeros - return null, 0 + return (), False, False else: - # this seems to be a widespread way for encoding zeros - return ints2octs((0,)), 0 - octets = [] - value = int(value) # to save on ops on asn1 type - while 1: - octets.insert(0, value & 0xff) - if value == 0 or value == -1: - break - value = value >> 8 - if value == 0 and octets[0] & 0x80: - octets.insert(0, 0) - while len(octets) > 1 and \ - (octets[0] == 0 and octets[1] & 0x80 == 0 or \ - octets[0] == 0xff and octets[1] & 0x80 != 0): - del octets[0] - return ints2octs(octets), 0 + return (0,), False, False + + return to_bytes(int(value), signed=True), False, True + class BitStringEncoder(AbstractItemEncoder): def encodeValue(self, encodeFun, value, defMode, maxChunkSize): - if not maxChunkSize or len(value) <= maxChunkSize*8: - r = {}; l = len(value); p = 0; j = 7 - while p < l: - i, j = divmod(p, 8) - r[i] = r.get(i,0) | value[p]<<(7-j) - p = p + 1 - keys = list(r); keys.sort() - return int2oct(7-j) + ints2octs([r[k] for k in keys]), 0 + valueLength = len(value) + if valueLength % 8: + alignedValue = value << (8 - valueLength % 8) else: - pos = 0; substrate = null - while 1: - # count in octets - v = value.clone(value[pos*8:pos*8+maxChunkSize*8]) - if not v: - break - substrate = substrate + encodeFun(v, defMode, maxChunkSize) - pos = pos + maxChunkSize - return substrate, 1 + alignedValue = value + + if not maxChunkSize or len(alignedValue) <= maxChunkSize * 8: + substrate = alignedValue.asOctets() + return int2oct(len(substrate) * 8 - valueLength) + substrate, False, True + + stop = 0 + substrate = null + while stop < valueLength: + start = stop + stop = min(start + maxChunkSize * 8, valueLength) + substrate += encodeFun(alignedValue[start:stop], defMode, maxChunkSize) + + return substrate, True, True + class OctetStringEncoder(AbstractItemEncoder): def encodeValue(self, encodeFun, value, defMode, maxChunkSize): if not maxChunkSize 
or len(value) <= maxChunkSize: - return value.asOctets(), 0 + return value.asOctets(), False, True else: - pos = 0; substrate = null - while 1: - v = value.clone(value[pos:pos+maxChunkSize]) + pos = 0 + substrate = null + while True: + v = value.clone(value[pos:pos + maxChunkSize]) if not v: break - substrate = substrate + encodeFun(v, defMode, maxChunkSize) - pos = pos + maxChunkSize - return substrate, 1 + substrate += encodeFun(v, defMode, maxChunkSize) + pos += maxChunkSize + + return substrate, True, True + class NullEncoder(AbstractItemEncoder): - supportIndefLenMode = 0 + supportIndefLenMode = False + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): - return null, 0 + return null, False, True + class ObjectIdentifierEncoder(AbstractItemEncoder): - supportIndefLenMode = 0 - precomputedValues = { - (1, 3, 6, 1, 2): (43, 6, 1, 2), - (1, 3, 6, 1, 4): (43, 6, 1, 4) - } - def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + supportIndefLenMode = False + + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): oid = value.asTuple() - if oid[:5] in self.precomputedValues: - octets = self.precomputedValues[oid[:5]] - index = 5 + + # Build the first pair + try: + first = oid[0] + second = oid[1] + + except IndexError: + raise error.PyAsn1Error('Short OID %s' % (value,)) + + if 0 <= second <= 39: + if first == 1: + oid = (second + 40,) + oid[2:] + elif first == 0: + oid = (second,) + oid[2:] + elif first == 2: + oid = (second + 80,) + oid[2:] + else: + raise error.PyAsn1Error('Impossible first/second arcs at %s' % (value,)) + elif first == 2: + oid = (second + 80,) + oid[2:] else: - if len(oid) < 2: - raise error.PyAsn1Error('Short OID %s' % (value,)) - - # Build the first twos - if oid[0] > 6 or oid[1] > 39 or oid[0] == 6 and oid[1] > 15: - raise error.PyAsn1Error( - 'Initial sub-ID overflow %s in OID %s' % (oid[:2], value) - ) - octets = (oid[0] * 40 + oid[1],) - index = 2 - - # Cycle through subids - for subid in oid[index:]: - 
if subid > -1 and subid < 128: + raise error.PyAsn1Error('Impossible first/second arcs at %s' % (value,)) + + octets = () + + # Cycle through subIds + for subOid in oid: + if 0 <= subOid <= 127: # Optimize for the common case - octets = octets + (subid & 0x7f,) - elif subid < 0 or subid > 0xFFFFFFFF: - raise error.PyAsn1Error( - 'SubId overflow %s in %s' % (subid, value) - ) - else: + octets += (subOid,) + elif subOid > 127: # Pack large Sub-Object IDs - res = (subid & 0x7f,) - subid = subid >> 7 - while subid > 0: - res = (0x80 | (subid & 0x7f),) + res - subid = subid >> 7 + res = (subOid & 0x7f,) + subOid >>= 7 + while subOid: + res = (0x80 | (subOid & 0x7f),) + res + subOid >>= 7 # Add packed Sub-Object ID to resulted Object ID octets += res - - return ints2octs(octets), 0 + else: + raise error.PyAsn1Error('Negative OID arc %s at %s' % (subOid, value)) + + return octets, False, False + class RealEncoder(AbstractItemEncoder): supportIndefLenMode = 0 + binEncBase = 2 # set to None to choose encoding base automatically + + @staticmethod + def _dropFloatingPoint(m, encbase, e): + ms, es = 1, 1 + if m < 0: + ms = -1 # mantissa sign + if e < 0: + es = -1 # exponenta sign + m *= ms + if encbase == 8: + m *= 2 ** (abs(e) % 3 * es) + e = abs(e) // 3 * es + elif encbase == 16: + m *= 2 ** (abs(e) % 4 * es) + e = abs(e) // 4 * es + + while True: + if int(m) != m: + m *= encbase + e -= 1 + continue + break + return ms, int(m), encbase, e + + def _chooseEncBase(self, value): + m, b, e = value + encBase = [2, 8, 16] + if value.binEncBase in encBase: + return self._dropFloatingPoint(m, value.binEncBase, e) + elif self.binEncBase in encBase: + return self._dropFloatingPoint(m, self.binEncBase, e) + # auto choosing base 2/8/16 + mantissa = [m, m, m] + exponenta = [e, e, e] + sign = 1 + encbase = 2 + e = float('inf') + for i in range(3): + (sign, + mantissa[i], + encBase[i], + exponenta[i]) = self._dropFloatingPoint(mantissa[i], encBase[i], exponenta[i]) + if abs(exponenta[i]) < 
abs(e) or (abs(exponenta[i]) == abs(e) and mantissa[i] < m): + e = exponenta[i] + m = int(mantissa[i]) + encbase = encBase[i] + return sign, m, encbase, e + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): if value.isPlusInfinity(): - return int2oct(0x40), 0 + return (0x40,), False, False if value.isMinusInfinity(): - return int2oct(0x41), 0 + return (0x41,), False, False m, b, e = value if not m: - return null, 0 + return null, False, True if b == 10: - return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), 0 + return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), False, True elif b == 2: - fo = 0x80 # binary enoding - if m < 0: - fo = fo | 0x40 # sign bit - m = -m - while int(m) != m: # drop floating point - m *= 2 - e -= 1 - while m & 0x1 == 0: # mantissa normalization + fo = 0x80 # binary encoding + ms, m, encbase, e = self._chooseEncBase(value) + if ms < 0: # mantissa sign + fo |= 0x40 # sign bit + # exponenta & mantissa normalization + if encbase == 2: + while m & 0x1 == 0: + m >>= 1 + e += 1 + elif encbase == 8: + while m & 0x7 == 0: + m >>= 3 + e += 1 + fo |= 0x10 + else: # encbase = 16 + while m & 0xf == 0: + m >>= 4 + e += 1 + fo |= 0x20 + sf = 0 # scale factor + while m & 0x1 == 0: m >>= 1 - e += 1 + sf += 1 + if sf > 3: + raise error.PyAsn1Error('Scale factor overflow') # bug if raised + fo |= sf << 2 eo = null - while e not in (0, -1): - eo = int2oct(e&0xff) + eo - e >>= 8 - if e == 0 and eo and oct2int(eo[0]) & 0x80: - eo = int2oct(0) + eo + if e == 0 or e == -1: + eo = int2oct(e & 0xff) + else: + while e not in (0, -1): + eo = int2oct(e & 0xff) + eo + e >>= 8 + if e == 0 and eo and oct2int(eo[0]) & 0x80: + eo = int2oct(0) + eo + if e == -1 and eo and not (oct2int(eo[0]) & 0x80): + eo = int2oct(0xff) + eo n = len(eo) if n > 0xff: raise error.PyAsn1Error('Real exponent overflow') @@ -235,51 +325,54 @@ def encodeValue(self, encodeFun, value, defMode, maxChunkSize): fo |= 2 else: fo |= 3 - eo = int2oct(n//0xff+1) + eo + eo 
= int2oct(n & 0xff) + eo po = null while m: - po = int2oct(m&0xff) + po + po = int2oct(m & 0xff) + po m >>= 8 substrate = int2oct(fo) + eo + po - return substrate, 0 + return substrate, False, True else: raise error.PyAsn1Error('Prohibited Real base %s' % b) + class SequenceEncoder(AbstractItemEncoder): def encodeValue(self, encodeFun, value, defMode, maxChunkSize): - value.setDefaultComponents() value.verifySizeSpec() - substrate = null; idx = len(value) + namedTypes = value.getComponentType() + substrate = null + idx = len(value) while idx > 0: - idx = idx - 1 - if value[idx] is None: # Optional component - continue - component = value.getDefaultComponentByPosition(idx) - if component is not None and component == value[idx]: - continue - substrate = encodeFun( - value[idx], defMode, maxChunkSize - ) + substrate - return substrate, 1 + idx -= 1 + if namedTypes: + if namedTypes[idx].isOptional and not value[idx].isValue: + continue + if namedTypes[idx].isDefaulted and value[idx] == namedTypes[idx].asn1Object: + continue + substrate = encodeFun(value[idx], defMode, maxChunkSize) + substrate + return substrate, True, True + class SequenceOfEncoder(AbstractItemEncoder): def encodeValue(self, encodeFun, value, defMode, maxChunkSize): value.verifySizeSpec() - substrate = null; idx = len(value) + substrate = null + idx = len(value) while idx > 0: - idx = idx - 1 - substrate = encodeFun( - value[idx], defMode, maxChunkSize - ) + substrate - return substrate, 1 + idx -= 1 + substrate = encodeFun(value[idx], defMode, maxChunkSize) + substrate + return substrate, True, True + class ChoiceEncoder(AbstractItemEncoder): def encodeValue(self, encodeFun, value, defMode, maxChunkSize): - return encodeFun(value.getComponent(), defMode, maxChunkSize), 1 + return encodeFun(value.getComponent(), defMode, maxChunkSize), True, True + class AnyEncoder(OctetStringEncoder): def encodeValue(self, encodeFun, value, defMode, maxChunkSize): - return value.asOctets(), defMode == 0 + return 
value.asOctets(), defMode == False, True + tagMap = { eoo.endOfOctets.tagSet: EndOfOctetsEncoder(), @@ -308,46 +401,106 @@ def encodeValue(self, encodeFun, value, defMode, maxChunkSize): char.UniversalString.tagSet: OctetStringEncoder(), char.BMPString.tagSet: OctetStringEncoder(), # useful types + useful.ObjectDescriptor.tagSet: OctetStringEncoder(), useful.GeneralizedTime.tagSet: OctetStringEncoder(), - useful.UTCTime.tagSet: OctetStringEncoder() - } + useful.UTCTime.tagSet: OctetStringEncoder() +} -# Type-to-codec map for ambiguous ASN.1 types +# Put in ambiguous & non-ambiguous types for faster codec lookup typeMap = { + univ.Boolean.typeId: BooleanEncoder(), + univ.Integer.typeId: IntegerEncoder(), + univ.BitString.typeId: BitStringEncoder(), + univ.OctetString.typeId: OctetStringEncoder(), + univ.Null.typeId: NullEncoder(), + univ.ObjectIdentifier.typeId: ObjectIdentifierEncoder(), + univ.Enumerated.typeId: IntegerEncoder(), + univ.Real.typeId: RealEncoder(), + # Sequence & Set have same tags as SequenceOf & SetOf univ.Set.typeId: SequenceEncoder(), univ.SetOf.typeId: SequenceOfEncoder(), univ.Sequence.typeId: SequenceEncoder(), univ.SequenceOf.typeId: SequenceOfEncoder(), univ.Choice.typeId: ChoiceEncoder(), - univ.Any.typeId: AnyEncoder() - } + univ.Any.typeId: AnyEncoder(), + # character string types + char.UTF8String.typeId: OctetStringEncoder(), + char.NumericString.typeId: OctetStringEncoder(), + char.PrintableString.typeId: OctetStringEncoder(), + char.TeletexString.typeId: OctetStringEncoder(), + char.VideotexString.typeId: OctetStringEncoder(), + char.IA5String.typeId: OctetStringEncoder(), + char.GraphicString.typeId: OctetStringEncoder(), + char.VisibleString.typeId: OctetStringEncoder(), + char.GeneralString.typeId: OctetStringEncoder(), + char.UniversalString.typeId: OctetStringEncoder(), + char.BMPString.typeId: OctetStringEncoder(), + # useful types + useful.ObjectDescriptor.typeId: OctetStringEncoder(), + useful.GeneralizedTime.typeId: 
OctetStringEncoder(), + useful.UTCTime.typeId: OctetStringEncoder() +} + -class Encoder: +class Encoder(object): + supportIndefLength = True + + # noinspection PyDefaultArgument def __init__(self, tagMap, typeMap={}): self.__tagMap = tagMap self.__typeMap = typeMap - def __call__(self, value, defMode=1, maxChunkSize=0): - debug.logger & debug.flagEncoder and debug.logger('encoder called in %sdef mode, chunk size %s for type %s, value:\n%s' % (not defMode and 'in' or '', maxChunkSize, value.__class__.__name__, value.prettyPrint())) - tagSet = value.getTagSet() + def __call__(self, value, defMode=True, maxChunkSize=0): + if not defMode and not self.supportIndefLength: + raise error.PyAsn1Error('Indefinite length encoding not supported by this codec') + debug.logger & debug.flagEncoder and debug.logger( + 'encoder called in %sdef mode, chunk size %s for type %s, value:\n%s' % ( + not defMode and 'in' or '', maxChunkSize, value.prettyPrintType(), value.prettyPrint())) + tagSet = value.tagSet if len(tagSet) > 1: concreteEncoder = explicitlyTaggedItemEncoder else: - if value.typeId is not None and value.typeId in self.__typeMap: + try: concreteEncoder = self.__typeMap[value.typeId] - elif tagSet in self.__tagMap: - concreteEncoder = self.__tagMap[tagSet] - else: - tagSet = value.baseTagSet - if tagSet in self.__tagMap: - concreteEncoder = self.__tagMap[tagSet] - else: - raise Error('No encoder for %s' % (value,)) - debug.logger & debug.flagEncoder and debug.logger('using value codec %s chosen by %r' % (concreteEncoder.__class__.__name__, tagSet)) + except KeyError: + # use base type for codec lookup to recover untagged types + baseTagSet = tag.TagSet(value.tagSet.baseTag, value.tagSet.baseTag) + try: + concreteEncoder = self.__tagMap[baseTagSet] + except KeyError: + raise error.PyAsn1Error('No encoder for %s' % (value,)) + debug.logger & debug.flagEncoder and debug.logger( + 'using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet)) substrate = 
concreteEncoder.encode( self, value, defMode, maxChunkSize - ) - debug.logger & debug.flagEncoder and debug.logger('built %s octets of substrate: %s\nencoder completed' % (len(substrate), debug.hexdump(substrate))) + ) + debug.logger & debug.flagEncoder and debug.logger( + 'built %s octets of substrate: %s\nencoder completed' % (len(substrate), debug.hexdump(substrate))) return substrate +#: Turns ASN.1 object into BER octet stream. +#: +#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: walks all its components recursively and produces a BER octet stream. +#: +#: Parameters +#: ---------- +# value: any pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: A pyasn1 object to encode +#: +#: defMode: :py:class:`bool` +#: If `False`, produces indefinite length encoding +#: +#: maxChunkSize: :py:class:`int` +#: Maximum chunk size in chunked encoding mode (0 denotes unlimited chunk size) +#: +#: Returns +#: ------- +#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) +#: Given ASN.1 object encoded into BER octetstream +#: +#: Raises +#: ------ +#: : :py:class:`pyasn1.error.PyAsn1Error` +#: On encoding errors encode = Encoder(tagMap, typeMap) diff --git a/src/lib/pyasn1/codec/ber/eoo.py b/src/lib/pyasn1/codec/ber/eoo.py index 379be1996..b02f5cc43 100644 --- a/src/lib/pyasn1/codec/ber/eoo.py +++ b/src/lib/pyasn1/codec/ber/eoo.py @@ -1,8 +1,25 @@ +# +# This file is part of pyasn1 software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# from pyasn1.type import base, tag + class EndOfOctets(base.AbstractSimpleAsn1Item): defaultValue = 0 tagSet = tag.initTagSet( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x00) - ) + ) + + _instance = None + + def __new__(cls, *args): + if cls._instance is None: + cls._instance = object.__new__(cls, *args) + + return cls._instance + + endOfOctets = EndOfOctets() diff --git a/src/lib/pyasn1/codec/cer/decoder.py b/src/lib/pyasn1/codec/cer/decoder.py index 9fd37c134..bf9cf4af9 100644 --- a/src/lib/pyasn1/codec/cer/decoder.py +++ b/src/lib/pyasn1/codec/cer/decoder.py @@ -1,16 +1,25 @@ -# CER decoder +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# from pyasn1.type import univ from pyasn1.codec.ber import decoder from pyasn1.compat.octets import oct2int from pyasn1 import error +__all__ = ['decode'] + + class BooleanDecoder(decoder.AbstractSimpleDecoder): protoComponent = univ.Boolean(0) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, state, decodeFun, substrateFun): head, tail = substrate[:length], substrate[length:] - if not head: - raise error.PyAsn1Error('Empty substrate') + if not head or length != 1: + raise error.PyAsn1Error('Not single-octet Boolean payload') byte = oct2int(head[0]) # CER/DER specifies encoding of TRUE as 0xFF and FALSE as 0x0, while # BER allows any non-zero value as TRUE; cf. sections 8.2.2. 
and 11.1 @@ -20,16 +29,59 @@ def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, elif byte == 0x00: value = 0 else: - raise error.PyAsn1Error('Boolean CER violation: %s' % byte) + raise error.PyAsn1Error('Unexpected Boolean payload: %s' % byte) return self._createComponent(asn1Spec, tagSet, value), tail +# TODO: prohibit non-canonical encoding +BitStringDecoder = decoder.BitStringDecoder +OctetStringDecoder = decoder.OctetStringDecoder +RealDecoder = decoder.RealDecoder + tagMap = decoder.tagMap.copy() -tagMap.update({ - univ.Boolean.tagSet: BooleanDecoder() - }) +tagMap.update( + {univ.Boolean.tagSet: BooleanDecoder(), + univ.BitString.tagSet: BitStringDecoder(), + univ.OctetString.tagSet: OctetStringDecoder(), + univ.Real.tagSet: RealDecoder()} +) + +typeMap = decoder.typeMap.copy() + +# Put in non-ambiguous types for faster codec lookup +for typeDecoder in tagMap.values(): + typeId = typeDecoder.protoComponent.__class__.typeId + if typeId is not None and typeId not in typeMap: + typeMap[typeId] = typeDecoder + -typeMap = decoder.typeMap +class Decoder(decoder.Decoder): + pass -class Decoder(decoder.Decoder): pass +#: Turns CER octet stream into an ASN.1 object. +#: +#: Takes CER octetstream and decode it into an ASN.1 object +#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which +#: may be a scalar or an arbitrary nested structure. +#: +#: Parameters +#: ---------- +#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) +#: CER octetstream +#: +#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative +#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure +#: being decoded, *asn1Spec* may or may not be required. Most common reason for +#: it to require is that ASN.1 structure is encoded in *IMPLICIT* tagging mode. 
+#: +#: Returns +#: ------- +#: : :py:class:`tuple` +#: A tuple of pyasn1 object recovered from CER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: and the unprocessed trailing portion of the *substrate* (may be empty) +#: +#: Raises +#: ------ +#: : :py:class:`pyasn1.error.PyAsn1Error` +#: On decoding errors decode = Decoder(tagMap, decoder.typeMap) diff --git a/src/lib/pyasn1/codec/cer/encoder.py b/src/lib/pyasn1/codec/cer/encoder.py index 4c05130af..e241e43df 100644 --- a/src/lib/pyasn1/codec/cer/encoder.py +++ b/src/lib/pyasn1/codec/cer/encoder.py @@ -1,87 +1,179 @@ -# CER encoder +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# from pyasn1.type import univ +from pyasn1.type import useful from pyasn1.codec.ber import encoder -from pyasn1.compat.octets import int2oct, null +from pyasn1.compat.octets import int2oct, str2octs, null +from pyasn1 import error + +__all__ = ['encode'] + class BooleanEncoder(encoder.IntegerEncoder): def encodeValue(self, encodeFun, client, defMode, maxChunkSize): if client == 0: - substrate = int2oct(0) + substrate = (0,) else: - substrate = int2oct(255) - return substrate, 0 + substrate = (255,) + return substrate, False, False + class BitStringEncoder(encoder.BitStringEncoder): def encodeValue(self, encodeFun, client, defMode, maxChunkSize): return encoder.BitStringEncoder.encodeValue( self, encodeFun, client, defMode, 1000 - ) + ) + class OctetStringEncoder(encoder.OctetStringEncoder): def encodeValue(self, encodeFun, client, defMode, maxChunkSize): return encoder.OctetStringEncoder.encodeValue( self, encodeFun, client, defMode, 1000 - ) + ) + + +class RealEncoder(encoder.RealEncoder): + def _chooseEncBase(self, value): + m, b, e = value + return self._dropFloatingPoint(m, b, e) + -# specialized RealEncoder here # specialized GeneralStringEncoder here -# specialized GeneralizedTimeEncoder here -# specialized UTCTimeEncoder 
here + +class GeneralizedTimeEncoder(OctetStringEncoder): + zchar = str2octs('Z') + pluschar = str2octs('+') + minuschar = str2octs('-') + zero = str2octs('0') + + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + octets = client.asOctets() + # This breaks too many existing data items + # if '.' not in octets: + # raise error.PyAsn1Error('Format must include fraction of second: %r' % octets) + if len(octets) < 15: + raise error.PyAsn1Error('Bad UTC time length: %r' % octets) + if self.pluschar in octets or self.minuschar in octets: + raise error.PyAsn1Error('Must be UTC time: %r' % octets) + if octets[-1] != self.zchar[0]: + raise error.PyAsn1Error('Missing timezone specifier: %r' % octets) + return encoder.OctetStringEncoder.encodeValue( + self, encodeFun, client, defMode, 1000 + ) + + +class UTCTimeEncoder(encoder.OctetStringEncoder): + zchar = str2octs('Z') + pluschar = str2octs('+') + minuschar = str2octs('-') + + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + octets = client.asOctets() + if self.pluschar in octets or self.minuschar in octets: + raise error.PyAsn1Error('Must be UTC time: %r' % octets) + if octets and octets[-1] != self.zchar[0]: + client = client.clone(octets + self.zchar) + if len(client) != 13: + raise error.PyAsn1Error('Bad UTC time length: %r' % client) + return encoder.OctetStringEncoder.encodeValue( + self, encodeFun, client, defMode, 1000 + ) + class SetOfEncoder(encoder.SequenceOfEncoder): def encodeValue(self, encodeFun, client, defMode, maxChunkSize): - if isinstance(client, univ.SequenceAndSetBase): - client.setDefaultComponents() client.verifySizeSpec() - substrate = null; idx = len(client) + substrate = null + idx = len(client) # This is certainly a hack but how else do I distinguish SetOf # from Set if they have the same tags&constraints? 
if isinstance(client, univ.SequenceAndSetBase): # Set + namedTypes = client.getComponentType() comps = [] while idx > 0: - idx = idx - 1 - if client[idx] is None: # Optional component + idx -= 1 + if namedTypes[idx].isOptional and not client[idx].isValue: continue - if client.getDefaultComponentByPosition(idx) == client[idx]: + if namedTypes[idx].isDefaulted and client[idx] == namedTypes[idx].asn1Object: continue comps.append(client[idx]) - comps.sort(key=lambda x: isinstance(x, univ.Choice) and \ - x.getMinTagSet() or x.getTagSet()) + comps.sort(key=lambda x: isinstance(x, univ.Choice) and x.getMinTagSet() or x.tagSet) for c in comps: substrate += encodeFun(c, defMode, maxChunkSize) else: # SetOf compSubs = [] while idx > 0: - idx = idx - 1 + idx -= 1 compSubs.append( encodeFun(client[idx], defMode, maxChunkSize) - ) + ) compSubs.sort() # perhaps padding's not needed substrate = null for compSub in compSubs: substrate += compSub - return substrate, 1 + return substrate, True, True + tagMap = encoder.tagMap.copy() tagMap.update({ univ.Boolean.tagSet: BooleanEncoder(), univ.BitString.tagSet: BitStringEncoder(), univ.OctetString.tagSet: OctetStringEncoder(), + univ.Real.tagSet: RealEncoder(), + useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(), + useful.UTCTime.tagSet: UTCTimeEncoder(), univ.SetOf().tagSet: SetOfEncoder() # conflcts with Set - }) +}) typeMap = encoder.typeMap.copy() typeMap.update({ + univ.Boolean.typeId: BooleanEncoder(), + univ.BitString.typeId: BitStringEncoder(), + univ.OctetString.typeId: OctetStringEncoder(), + univ.Real.typeId: RealEncoder(), + useful.GeneralizedTime.typeId: GeneralizedTimeEncoder(), + useful.UTCTime.typeId: UTCTimeEncoder(), univ.Set.typeId: SetOfEncoder(), univ.SetOf.typeId: SetOfEncoder() - }) +}) + class Encoder(encoder.Encoder): - def __call__(self, client, defMode=0, maxChunkSize=0): + def __call__(self, client, defMode=False, maxChunkSize=0): return encoder.Encoder.__call__(self, client, defMode, maxChunkSize) + 
+#: Turns ASN.1 object into CER octet stream. +#: +#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: walks all its components recursively and produces a CER octet stream. +#: +#: Parameters +#: ---------- +# value: any pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: A pyasn1 object to encode +#: +#: defMode: :py:class:`bool` +#: If `False`, produces indefinite length encoding +#: +#: maxChunkSize: :py:class:`int` +#: Maximum chunk size in chunked encoding mode (0 denotes unlimited chunk size) +#: +#: Returns +#: ------- +#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) +#: Given ASN.1 object encoded into BER octetstream +#: +#: Raises +#: ------ +#: : :py:class:`pyasn1.error.PyAsn1Error` +#: On encoding errors encode = Encoder(tagMap, typeMap) # EncoderFactory queries class instance and builds a map of tags -> encoders diff --git a/src/lib/pyasn1/codec/der/decoder.py b/src/lib/pyasn1/codec/der/decoder.py index 604abec2b..24d3cbcb9 100644 --- a/src/lib/pyasn1/codec/der/decoder.py +++ b/src/lib/pyasn1/codec/der/decoder.py @@ -1,9 +1,69 @@ -# DER decoder +# +# This file is part of pyasn1 software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# from pyasn1.type import univ from pyasn1.codec.cer import decoder -tagMap = decoder.tagMap -typeMap = decoder.typeMap -Decoder = decoder.Decoder +__all__ = ['decode'] + +class BitStringDecoder(decoder.BitStringDecoder): + supportConstructedForm = False + + +class OctetStringDecoder(decoder.OctetStringDecoder): + supportConstructedForm = False + +# TODO: prohibit non-canonical encoding +RealDecoder = decoder.RealDecoder + +tagMap = decoder.tagMap.copy() +tagMap.update( + {univ.BitString.tagSet: BitStringDecoder(), + univ.OctetString.tagSet: OctetStringDecoder(), + univ.Real.tagSet: RealDecoder()} +) + +typeMap = decoder.typeMap.copy() + +# Put in non-ambiguous types for faster codec lookup +for typeDecoder in tagMap.values(): + typeId = typeDecoder.protoComponent.__class__.typeId + if typeId is not None and typeId not in typeMap: + typeMap[typeId] = typeDecoder + + +class Decoder(decoder.Decoder): + supportIndefLength = False + + +#: Turns DER octet stream into an ASN.1 object. +#: +#: Takes DER octetstream and decode it into an ASN.1 object +#: (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which +#: may be a scalar or an arbitrary nested structure. +#: +#: Parameters +#: ---------- +#: substrate: :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) +#: DER octetstream +#: +#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative +#: A pyasn1 type object to act as a template guiding the decoder. Depending on the ASN.1 structure +#: being decoded, *asn1Spec* may or may not be required. Most common reason for +#: it to require is that ASN.1 structure is encoded in *IMPLICIT* tagging mode. 
+#: +#: Returns +#: ------- +#: : :py:class:`tuple` +#: A tuple of pyasn1 object recovered from DER substrate (:py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: and the unprocessed trailing portion of the *substrate* (may be empty) +#: +#: Raises +#: ------ +#: : :py:class:`pyasn1.error.PyAsn1Error` +#: On decoding errors decode = Decoder(tagMap, typeMap) diff --git a/src/lib/pyasn1/codec/der/encoder.py b/src/lib/pyasn1/codec/der/encoder.py index 4e5faefad..2d615e3fd 100644 --- a/src/lib/pyasn1/codec/der/encoder.py +++ b/src/lib/pyasn1/codec/der/encoder.py @@ -1,28 +1,67 @@ -# DER encoder +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# from pyasn1.type import univ from pyasn1.codec.cer import encoder +from pyasn1 import error + +__all__ = ['encode'] + class SetOfEncoder(encoder.SetOfEncoder): - def _cmpSetComponents(self, c1, c2): - tagSet1 = isinstance(c1, univ.Choice) and \ - c1.getEffectiveTagSet() or c1.getTagSet() - tagSet2 = isinstance(c2, univ.Choice) and \ - c2.getEffectiveTagSet() or c2.getTagSet() + @staticmethod + def _cmpSetComponents(c1, c2): + tagSet1 = isinstance(c1, univ.Choice) and c1.effectiveTagSet or c1.tagSet + tagSet2 = isinstance(c2, univ.Choice) and c2.effectiveTagSet or c2.tagSet return cmp(tagSet1, tagSet2) + tagMap = encoder.tagMap.copy() tagMap.update({ - # Overload CER encodrs with BER ones (a bit hackerish XXX) + # Overload CER encoders with BER ones (a bit hackerish XXX) univ.BitString.tagSet: encoder.encoder.BitStringEncoder(), univ.OctetString.tagSet: encoder.encoder.OctetStringEncoder(), # Set & SetOf have same tags univ.SetOf().tagSet: SetOfEncoder() - }) +}) + +typeMap = encoder.typeMap.copy() -typeMap = encoder.typeMap class Encoder(encoder.Encoder): - def __call__(self, client, defMode=1, maxChunkSize=0): + supportIndefLength = False + + def __call__(self, client, defMode=True, maxChunkSize=0): + if not defMode or maxChunkSize: + 
raise error.PyAsn1Error('DER forbids indefinite length mode') return encoder.Encoder.__call__(self, client, defMode, maxChunkSize) - + +#: Turns ASN.1 object into DER octet stream. +#: +#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: walks all its components recursively and produces a DER octet stream. +#: +#: Parameters +#: ---------- +# value: any pyasn1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: A pyasn1 object to encode +#: +#: defMode: :py:class:`bool` +#: If `False`, produces indefinite length encoding +#: +#: maxChunkSize: :py:class:`int` +#: Maximum chunk size in chunked encoding mode (0 denotes unlimited chunk size) +#: +#: Returns +#: ------- +#: : :py:class:`bytes` (Python 3) or :py:class:`str` (Python 2) +#: Given ASN.1 object encoded into BER octetstream +#: +#: Raises +#: ------ +#: : :py:class:`pyasn1.error.PyAsn1Error` +#: On encoding errors encode = Encoder(tagMap, typeMap) diff --git a/src/lib/pyasn1/codec/native/__init__.py b/src/lib/pyasn1/codec/native/__init__.py new file mode 100644 index 000000000..8c3066b2e --- /dev/null +++ b/src/lib/pyasn1/codec/native/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. diff --git a/src/lib/pyasn1/codec/native/decoder.py b/src/lib/pyasn1/codec/native/decoder.py new file mode 100644 index 000000000..be75cb861 --- /dev/null +++ b/src/lib/pyasn1/codec/native/decoder.py @@ -0,0 +1,188 @@ +# +# This file is part of pyasn1 software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from pyasn1.type import base, univ, char, useful, tag +from pyasn1 import debug, error + +__all__ = ['decode'] + + +class AbstractScalarDecoder(object): + def __call__(self, pyObject, asn1Spec, decoderFunc=None): + return asn1Spec.clone(pyObject) + + +class BitStringDecoder(AbstractScalarDecoder): + def __call__(self, pyObject, asn1Spec, decoderFunc=None): + return asn1Spec.clone(univ.BitString.fromBinaryString(pyObject)) + + +class SequenceOrSetDecoder(object): + def __call__(self, pyObject, asn1Spec, decoderFunc): + asn1Value = asn1Spec.clone() + + componentsTypes = asn1Spec.getComponentType() + + for field in asn1Value: + if field in pyObject: + asn1Value[field] = decoderFunc(pyObject[field], componentsTypes[field].asn1Object) + + return asn1Value + + +class SequenceOfOrSetOfDecoder(object): + def __call__(self, pyObject, asn1Spec, decoderFunc): + asn1Value = asn1Spec.clone() + + for pyValue in pyObject: + asn1Value.append(decoderFunc(pyValue, asn1Spec.getComponentType())) + + return asn1Value + + +class ChoiceDecoder(object): + def __call__(self, pyObject, asn1Spec, decoderFunc): + asn1Value = asn1Spec.clone() + + componentsTypes = asn1Spec.getComponentType() + + for field in pyObject: + if field in componentsTypes: + asn1Value[field] = decoderFunc(pyObject[field], componentsTypes[field].asn1Object) + break + + return asn1Value + + +tagMap = { + univ.Integer.tagSet: AbstractScalarDecoder(), + univ.Boolean.tagSet: AbstractScalarDecoder(), + univ.BitString.tagSet: BitStringDecoder(), + univ.OctetString.tagSet: AbstractScalarDecoder(), + univ.Null.tagSet: AbstractScalarDecoder(), + univ.ObjectIdentifier.tagSet: AbstractScalarDecoder(), + univ.Enumerated.tagSet: AbstractScalarDecoder(), + univ.Real.tagSet: AbstractScalarDecoder(), + univ.Sequence.tagSet: SequenceOrSetDecoder(), # conflicts with SequenceOf + univ.Set.tagSet: SequenceOrSetDecoder(), # conflicts with SetOf + 
univ.Choice.tagSet: ChoiceDecoder(), # conflicts with Any + # character string types + char.UTF8String.tagSet: AbstractScalarDecoder(), + char.NumericString.tagSet: AbstractScalarDecoder(), + char.PrintableString.tagSet: AbstractScalarDecoder(), + char.TeletexString.tagSet: AbstractScalarDecoder(), + char.VideotexString.tagSet: AbstractScalarDecoder(), + char.IA5String.tagSet: AbstractScalarDecoder(), + char.GraphicString.tagSet: AbstractScalarDecoder(), + char.VisibleString.tagSet: AbstractScalarDecoder(), + char.GeneralString.tagSet: AbstractScalarDecoder(), + char.UniversalString.tagSet: AbstractScalarDecoder(), + char.BMPString.tagSet: AbstractScalarDecoder(), + # useful types + useful.ObjectDescriptor.tagSet: AbstractScalarDecoder(), + useful.GeneralizedTime.tagSet: AbstractScalarDecoder(), + useful.UTCTime.tagSet: AbstractScalarDecoder() +} + +# Put in ambiguous & non-ambiguous types for faster codec lookup +typeMap = { + univ.Integer.typeId: AbstractScalarDecoder(), + univ.Boolean.typeId: AbstractScalarDecoder(), + univ.BitString.typeId: BitStringDecoder(), + univ.OctetString.typeId: AbstractScalarDecoder(), + univ.Null.typeId: AbstractScalarDecoder(), + univ.ObjectIdentifier.typeId: AbstractScalarDecoder(), + univ.Enumerated.typeId: AbstractScalarDecoder(), + univ.Real.typeId: AbstractScalarDecoder(), + # ambiguous base types + univ.Set.typeId: SequenceOrSetDecoder(), + univ.SetOf.typeId: SequenceOfOrSetOfDecoder(), + univ.Sequence.typeId: SequenceOrSetDecoder(), + univ.SequenceOf.typeId: SequenceOfOrSetOfDecoder(), + univ.Choice.typeId: ChoiceDecoder(), + univ.Any.typeId: AbstractScalarDecoder(), + # character string types + char.UTF8String.typeId: AbstractScalarDecoder(), + char.NumericString.typeId: AbstractScalarDecoder(), + char.PrintableString.typeId: AbstractScalarDecoder(), + char.TeletexString.typeId: AbstractScalarDecoder(), + char.VideotexString.typeId: AbstractScalarDecoder(), + char.IA5String.typeId: AbstractScalarDecoder(), + 
char.GraphicString.typeId: AbstractScalarDecoder(), + char.VisibleString.typeId: AbstractScalarDecoder(), + char.GeneralString.typeId: AbstractScalarDecoder(), + char.UniversalString.typeId: AbstractScalarDecoder(), + char.BMPString.typeId: AbstractScalarDecoder(), + # useful types + useful.ObjectDescriptor.typeId: AbstractScalarDecoder(), + useful.GeneralizedTime.typeId: AbstractScalarDecoder(), + useful.UTCTime.typeId: AbstractScalarDecoder() +} + + +class Decoder(object): + + # noinspection PyDefaultArgument + def __init__(self, tagMap, typeMap): + self.__tagMap = tagMap + self.__typeMap = typeMap + + def __call__(self, pyObject, asn1Spec): + if debug.logger & debug.flagDecoder: + debug.scope.push(type(pyObject).__name__) + debug.logger('decoder called at scope %s, working with type %s' % (debug.scope, type(pyObject).__name__)) + + if asn1Spec is None or not isinstance(asn1Spec, base.Asn1Item): + raise error.PyAsn1Error('asn1Spec is not valid (should be an instance of an ASN.1 Item, not %s)' % asn1Spec.__class__.__name__) + + try: + valueDecoder = self.__typeMap[asn1Spec.typeId] + except KeyError: + # use base type for codec lookup to recover untagged types + baseTagSet = tag.TagSet(asn1Spec.tagSet.baseTag, asn1Spec.tagSet.baseTag) + try: + valueDecoder = self.__tagMap[baseTagSet] + except KeyError: + raise error.PyAsn1Error('Unknown ASN.1 tag %s' % asn1Spec.tagSet) + + if debug.logger & debug.flagDecoder: + debug.logger('calling decoder %s on Python type %s <%s>' % (type(valueDecoder).__name__, type(pyObject).__name__, repr(pyObject))) + + value = valueDecoder(pyObject, asn1Spec, self) + + if debug.logger & debug.flagDecoder: + debug.logger('decoder %s produced ASN.1 type %s <%s>' % (type(valueDecoder).__name__, type(value).__name__, repr(value))) + debug.scope.pop() + + return value + + +#: Turns Python objects of built-in types into ASN.1 objects. +#: +#: Takes Python objects of built-in types and turns them into a tree of +#: ASN.1 objects (e.g. 
:py:class:`~pyasn1.type.base.PyAsn1Item` derivative) which +#: may be a scalar or an arbitrary nested structure. +#: +#: Parameters +#: ---------- +#: pyObject: :py:class:`object` +#: A scalar or nested Python objects +#: +#: asn1Spec: any pyasn1 type object e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative +#: A pyasn1 type object to act as a template guiding the decoder. It is required +#: for successful interpretation of Python objects mapping into their ASN.1 +#: representations. +#: +#: Returns +#: ------- +#: : :py:class:`~pyasn1.type.base.PyAsn1Item` derivative +#: A scalar or constructed pyasn1 object +#: +#: Raises +#: ------ +#: : :py:class:`pyasn1.error.PyAsn1Error` +#: On decoding errors +decode = Decoder(tagMap, typeMap) diff --git a/src/lib/pyasn1/codec/native/encoder.py b/src/lib/pyasn1/codec/native/encoder.py new file mode 100644 index 000000000..afeb8ae0f --- /dev/null +++ b/src/lib/pyasn1/codec/native/encoder.py @@ -0,0 +1,215 @@ +# +# This file is part of pyasn1 software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +try: + from collections import OrderedDict + +except ImportError: + OrderedDict = dict + +from pyasn1.type import base, univ, char, useful +from pyasn1 import debug, error + +__all__ = ['encode'] + + +class AbstractItemEncoder(object): + def encode(self, encodeFun, value): + raise error.PyAsn1Error('Not implemented') + + +class ExplicitlyTaggedItemEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + if isinstance(value, base.AbstractConstructedAsn1Item): + value = value.clone(tagSet=value.tagSet[:-1], + cloneValueFlag=1) + else: + value = value.clone(tagSet=value.tagSet[:-1]) + return encodeFun(value) + +explicitlyTaggedItemEncoder = ExplicitlyTaggedItemEncoder() + + +class BooleanEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return bool(value) + + +class IntegerEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return int(value) + + +class BitStringEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return str(value) + + +class OctetStringEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return value.asOctets() + + +class TextStringEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return value.prettyPrint() + + +class NullEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return None + + +class ObjectIdentifierEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return str(value) + + +class RealEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return float(value) + + +class SetEncoder(AbstractItemEncoder): + protoDict = dict + def encode(self, encodeFun, value): + value.verifySizeSpec() + namedTypes = value.getComponentType() + substrate = self.protoDict() + for idx, (key, subValue) in enumerate(value.items()): + if namedTypes[idx].isOptional and not value[idx].isValue: + continue + substrate[key] = 
encodeFun(subValue) + return substrate + + +class SequenceEncoder(SetEncoder): + protoDict = OrderedDict + + +class SequenceOfEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + value.verifySizeSpec() + return [encodeFun(x) for x in value] + + +class ChoiceEncoder(SequenceEncoder): + pass + + +class AnyEncoder(AbstractItemEncoder): + def encode(self, encodeFun, value): + return value.asOctets() + + +tagMap = { + univ.Boolean.tagSet: BooleanEncoder(), + univ.Integer.tagSet: IntegerEncoder(), + univ.BitString.tagSet: BitStringEncoder(), + univ.OctetString.tagSet: OctetStringEncoder(), + univ.Null.tagSet: NullEncoder(), + univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(), + univ.Enumerated.tagSet: IntegerEncoder(), + univ.Real.tagSet: RealEncoder(), + # Sequence & Set have same tags as SequenceOf & SetOf + univ.SequenceOf.tagSet: SequenceOfEncoder(), + univ.SetOf.tagSet: SequenceOfEncoder(), + univ.Choice.tagSet: ChoiceEncoder(), + # character string types + char.UTF8String.tagSet: TextStringEncoder(), + char.NumericString.tagSet: TextStringEncoder(), + char.PrintableString.tagSet: TextStringEncoder(), + char.TeletexString.tagSet: TextStringEncoder(), + char.VideotexString.tagSet: TextStringEncoder(), + char.IA5String.tagSet: TextStringEncoder(), + char.GraphicString.tagSet: TextStringEncoder(), + char.VisibleString.tagSet: TextStringEncoder(), + char.GeneralString.tagSet: TextStringEncoder(), + char.UniversalString.tagSet: TextStringEncoder(), + char.BMPString.tagSet: TextStringEncoder(), + # useful types + useful.ObjectDescriptor.tagSet: OctetStringEncoder(), + useful.GeneralizedTime.tagSet: OctetStringEncoder(), + useful.UTCTime.tagSet: OctetStringEncoder() +} + +# Type-to-codec map for ambiguous ASN.1 types +typeMap = { + univ.Set.typeId: SetEncoder(), + univ.SetOf.typeId: SequenceOfEncoder(), + univ.Sequence.typeId: SequenceEncoder(), + univ.SequenceOf.typeId: SequenceOfEncoder(), + univ.Choice.typeId: ChoiceEncoder(), + univ.Any.typeId: 
AnyEncoder() +} + + +class Encoder(object): + + # noinspection PyDefaultArgument + def __init__(self, tagMap, typeMap={}): + self.__tagMap = tagMap + self.__typeMap = typeMap + + def __call__(self, asn1Value): + if not isinstance(asn1Value, base.Asn1Item): + raise error.PyAsn1Error('value is not valid (should be an instance of an ASN.1 Item)') + + if debug.logger & debug.flagEncoder: + debug.scope.push(type(asn1Value).__name__) + debug.logger('encoder called for type %s <%s>' % (type(asn1Value).__name__, asn1Value.prettyPrint())) + + tagSet = asn1Value.tagSet + if len(tagSet) > 1: + concreteEncoder = explicitlyTaggedItemEncoder + else: + if asn1Value.typeId is not None and asn1Value.typeId in self.__typeMap: + concreteEncoder = self.__typeMap[asn1Value.typeId] + elif tagSet in self.__tagMap: + concreteEncoder = self.__tagMap[tagSet] + else: + tagSet = asn1Value.baseTagSet + if tagSet in self.__tagMap: + concreteEncoder = self.__tagMap[tagSet] + else: + raise error.PyAsn1Error('No encoder for %s' % (asn1Value,)) + + debug.logger & debug.flagEncoder and debug.logger('using value codec %s chosen by %s' % (type(concreteEncoder).__name__, tagSet)) + + pyObject = concreteEncoder.encode(self, asn1Value) + + if debug.logger & debug.flagEncoder: + debug.logger('encoder %s produced: %s' % (type(concreteEncoder).__name__, repr(pyObject))) + debug.scope.pop() + + return pyObject + + +#: Turns ASN.1 object into a Python built-in type object(s). +#: +#: Takes any ASN.1 object (e.g. :py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: walks all its components recursively and produces a Python built-in type or a tree +#: of those. +#: +#: One exception is that instead of :py:class:`dict`, the :py:class:`OrderedDict` +#: can be produced (whenever available) to preserve ordering of the components +#: in ASN.1 SEQUENCE. +#: +#: Parameters +#: ---------- +# asn1Value: any pyasn1 object (e.g. 
:py:class:`~pyasn1.type.base.PyAsn1Item` derivative) +#: pyasn1 object to encode (or a tree of them) +#: +#: Returns +#: ------- +#: : :py:class:`object` +#: Python built-in type instance (or a tree of them) +#: +#: Raises +#: ------ +#: : :py:class:`pyasn1.error.PyAsn1Error` +#: On encoding errors +encode = Encoder(tagMap, typeMap) diff --git a/src/lib/pyasn1/compat/binary.py b/src/lib/pyasn1/compat/binary.py new file mode 100644 index 000000000..65c42c742 --- /dev/null +++ b/src/lib/pyasn1/compat/binary.py @@ -0,0 +1,25 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from sys import version_info + +if version_info[0:2] < (2, 6): + def bin(value): + bitstring = [] + + while value: + if value & 1 == 1: + bitstring.append('1') + else: + bitstring.append('0') + + value >>= 1 + + bitstring.reverse() + + return '0b' + ''.join(bitstring) +else: + bin = bin diff --git a/src/lib/pyasn1/compat/integer.py b/src/lib/pyasn1/compat/integer.py new file mode 100644 index 000000000..ae9c7e1d5 --- /dev/null +++ b/src/lib/pyasn1/compat/integer.py @@ -0,0 +1,96 @@ +# +# This file is part of pyasn1 software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +if sys.version_info[0:2] < (3, 2): + from binascii import a2b_hex, b2a_hex +from pyasn1.compat.octets import oct2int, null + +if sys.version_info[0:2] < (3, 2): + def from_bytes(octets, signed=False): + value = long(b2a_hex(str(octets)), 16) + + if signed and oct2int(octets[0]) & 0x80: + return value - (1 << len(octets) * 8) + + return value + + def to_bytes(value, signed=False, length=0): + if value < 0: + if signed: + bits = bitLength(value) + + # two's complement form + maxValue = 1 << bits + valueToEncode = (value + maxValue) % maxValue + + else: + raise OverflowError('can\'t convert negative int to unsigned') + elif value == 0 and length == 0: + return null + else: + bits = 0 + valueToEncode = value + + hexValue = hex(valueToEncode)[2:] + if hexValue.endswith('L'): + hexValue = hexValue[:-1] + + if len(hexValue) & 1: + hexValue = '0' + hexValue + + # padding may be needed for two's complement encoding + if value != valueToEncode or length: + hexLength = len(hexValue) * 4 + + padLength = max(length, bits) + + if padLength > hexLength: + hexValue = '00' * ((padLength - hexLength - 1) // 8 + 1) + hexValue + elif length and hexLength - length > 7: + raise OverflowError('int too big to convert') + + firstOctet = int(hexValue[:2], 16) + + if signed: + if firstOctet & 0x80: + if value >= 0: + hexValue = '00' + hexValue + elif value < 0: + hexValue = 'ff' + hexValue + + octets_value = a2b_hex(hexValue) + + return octets_value + + def bitLength(number): + # bits in unsigned number + hexValue = hex(abs(number)) + bits = len(hexValue) - 2 + if hexValue.endswith('L'): + bits -= 1 + if bits & 1: + bits += 1 + bits *= 4 + # TODO: strip lhs zeros + return bits + +else: + + def from_bytes(octets, signed=False): + return int.from_bytes(bytes(octets), 'big', signed=signed) + + def to_bytes(value, signed=False, length=0): + length = max(value.bit_length(), length) + + if 
signed and length % 8 == 0: + length += 1 + + return value.to_bytes(length // 8 + (length % 8 and 1 or 0), 'big', signed=signed) + + def bitLength(number): + return int(number).bit_length() + diff --git a/src/lib/pyasn1/compat/octets.py b/src/lib/pyasn1/compat/octets.py index f7f2a29bf..ec497a684 100644 --- a/src/lib/pyasn1/compat/octets.py +++ b/src/lib/pyasn1/compat/octets.py @@ -1,20 +1,46 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# from sys import version_info if version_info[0] <= 2: int2oct = chr - ints2octs = lambda s: ''.join([ int2oct(x) for x in s ]) + # noinspection PyPep8 + ints2octs = lambda s: ''.join([int2oct(x) for x in s]) null = '' oct2int = ord - octs2ints = lambda s: [ oct2int(x) for x in s ] + # noinspection PyPep8 + octs2ints = lambda s: [oct2int(x) for x in s] + # noinspection PyPep8 str2octs = lambda x: x + # noinspection PyPep8 octs2str = lambda x: x + # noinspection PyPep8 isOctetsType = lambda s: isinstance(s, str) + # noinspection PyPep8 + isStringType = lambda s: isinstance(s, (str, unicode)) + # noinspection PyPep8 + ensureString = str else: ints2octs = bytes + # noinspection PyPep8 int2oct = lambda x: ints2octs((x,)) null = ints2octs() + # noinspection PyPep8 oct2int = lambda x: x - octs2ints = lambda s: [ x for x in s ] - str2octs = lambda x: x.encode() - octs2str = lambda x: x.decode() + # noinspection PyPep8 + octs2ints = lambda x: x + # noinspection PyPep8 + str2octs = lambda x: x.encode('iso-8859-1') + # noinspection PyPep8 + octs2str = lambda x: x.decode('iso-8859-1') + # noinspection PyPep8 isOctetsType = lambda s: isinstance(s, bytes) + # noinspection PyPep8 + isStringType = lambda s: isinstance(s, str) + # noinspection PyPep8 + ensureString = bytes + diff --git a/src/lib/pyasn1/debug.py b/src/lib/pyasn1/debug.py index c27cb1d44..04a9da5cf 100644 --- a/src/lib/pyasn1/debug.py +++ b/src/lib/pyasn1/debug.py @@ -1,36 +1,96 @@ -import 
sys +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import logging from pyasn1.compat.octets import octs2ints from pyasn1 import error from pyasn1 import __version__ -flagNone = 0x0000 -flagEncoder = 0x0001 -flagDecoder = 0x0002 -flagAll = 0xffff +__all__ = ['Debug', 'setLogger', 'hexdump'] + +flagNone = 0x0000 +flagEncoder = 0x0001 +flagDecoder = 0x0002 +flagAll = 0xffff flagMap = { 'encoder': flagEncoder, 'decoder': flagDecoder, 'all': flagAll - } +} + + +class Printer(object): + # noinspection PyShadowingNames + def __init__(self, logger=None, handler=None, formatter=None): + if logger is None: + logger = logging.getLogger('pyasn1') + logger.setLevel(logging.DEBUG) + if handler is None: + handler = logging.StreamHandler() + if formatter is None: + formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s') + handler.setFormatter(formatter) + handler.setLevel(logging.DEBUG) + logger.addHandler(handler) + self.__logger = logger + + def __call__(self, msg): + self.__logger.debug(msg) + + def __str__(self): + return '' + + +if hasattr(logging, 'NullHandler'): + NullHandler = logging.NullHandler +else: + # Python 2.6 and older + class NullHandler(logging.Handler): + def emit(self, record): + pass + -class Debug: - defaultPrinter = sys.stderr.write - def __init__(self, *flags): +class Debug(object): + defaultPrinter = None + + def __init__(self, *flags, **options): self._flags = flagNone - self._printer = self.defaultPrinter + if options.get('printer') is not None: + self._printer = options.get('printer') + elif self.defaultPrinter is not None: + self._printer = self.defaultPrinter + if 'loggerName' in options: + # route our logs to parent logger + self._printer = Printer( + logger=logging.getLogger(options['loggerName']), + handler=NullHandler() + ) + else: + self._printer = Printer() self('running pyasn1 version %s' % __version__) for f in flags: - if f not in flagMap: - 
raise error.PyAsn1Error('bad debug flag %s' % (f,)) - self._flags = self._flags | flagMap[f] - self('debug category \'%s\' enabled' % f) - + inverse = f and f[0] in ('!', '~') + if inverse: + f = f[1:] + try: + if inverse: + self._flags &= ~flagMap[f] + else: + self._flags |= flagMap[f] + except KeyError: + raise error.PyAsn1Error('bad debug flag %s' % f) + + self('debug category \'%s\' %s' % (f, inverse and 'disabled' or 'enabled')) + def __str__(self): return 'logger %s, flags %x' % (self._printer, self._flags) - + def __call__(self, msg): - self._printer('DBG: %s\n' % msg) + self._printer(msg) def __and__(self, flag): return self._flags & flag @@ -38,19 +98,23 @@ def __and__(self, flag): def __rand__(self, flag): return flag & self._flags + logger = 0 + def setLogger(l): global logger logger = l + def hexdump(octets): return ' '.join( - [ '%s%.2X' % (n%16 == 0 and ('\n%.5d: ' % n) or '', x) - for n,x in zip(range(len(octets)), octs2ints(octets)) ] - ) + ['%s%.2X' % (n % 16 == 0 and ('\n%.5d: ' % n) or '', x) + for n, x in zip(range(len(octets)), octs2ints(octets))] + ) -class Scope: + +class Scope(object): def __init__(self): self._list = [] @@ -62,4 +126,5 @@ def push(self, token): def pop(self): return self._list.pop() + scope = Scope() diff --git a/src/lib/pyasn1/error.py b/src/lib/pyasn1/error.py index 716406ff6..85308557b 100644 --- a/src/lib/pyasn1/error.py +++ b/src/lib/pyasn1/error.py @@ -1,3 +1,18 @@ -class PyAsn1Error(Exception): pass -class ValueConstraintError(PyAsn1Error): pass -class SubstrateUnderrunError(PyAsn1Error): pass +# +# This file is part of pyasn1 software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# + + +class PyAsn1Error(Exception): + pass + + +class ValueConstraintError(PyAsn1Error): + pass + + +class SubstrateUnderrunError(PyAsn1Error): + pass diff --git a/src/lib/pyasn1/type/base.py b/src/lib/pyasn1/type/base.py index 40873719c..00c329c21 100644 --- a/src/lib/pyasn1/type/base.py +++ b/src/lib/pyasn1/type/base.py @@ -1,134 +1,402 @@ -# Base classes for ASN.1 types +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# import sys -from pyasn1.type import constraint, tagmap +from pyasn1.type import constraint, tagmap, tag from pyasn1 import error -class Asn1Item: pass +__all__ = ['Asn1Item', 'Asn1ItemBase', 'AbstractSimpleAsn1Item', 'AbstractConstructedAsn1Item'] + + +class Asn1Item(object): + @classmethod + def getTypeId(cls, increment=1): + try: + Asn1Item._typeCounter += increment + except AttributeError: + Asn1Item._typeCounter = increment + return Asn1Item._typeCounter + class Asn1ItemBase(Asn1Item): - # Set of tags for this ASN.1 type - tagSet = () - - # A list of constraint.Constraint instances for checking values + #: Set or return a :py:class:`~pyasn1.type.tag.TagSet` object representing + #: ASN.1 tag(s) associated with |ASN.1| type. + tagSet = tag.TagSet() + + #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + #: object imposing constraints on initialization values. 
subtypeSpec = constraint.ConstraintsIntersection() - # Used for ambiguous ASN.1 types identification + # Disambiguation ASN.1 types identification typeId = None - + def __init__(self, tagSet=None, subtypeSpec=None): if tagSet is None: - self._tagSet = self.tagSet + self._tagSet = self.__class__.tagSet else: self._tagSet = tagSet if subtypeSpec is None: - self._subtypeSpec = self.subtypeSpec + self._subtypeSpec = self.__class__.subtypeSpec else: self._subtypeSpec = subtypeSpec - def _verifySubtypeSpec(self, value, idx=None): + @property + def effectiveTagSet(self): + """For |ASN.1| type is equivalent to *tagSet* + """ + return self._tagSet # used by untagged types + + @property + def tagMap(self): + """Return a :class:`~pyasn1.type.tagmap.TagMap` object mapping ASN.1 tags to ASN.1 objects within callee object. + """ try: - self._subtypeSpec(value, idx) - except error.PyAsn1Error: - c, i, t = sys.exc_info() - raise c('%s at %s' % (i, self.__class__.__name__)) - - def getSubtypeSpec(self): return self._subtypeSpec - - def getTagSet(self): return self._tagSet - def getEffectiveTagSet(self): return self._tagSet # used by untagged types - def getTagMap(self): return tagmap.TagMap({self._tagSet: self}) - - def isSameTypeWith(self, other): + return self._tagMap + + except AttributeError: + self._tagMap = tagmap.TagMap({self._tagSet: self}) + return self._tagMap + + def isSameTypeWith(self, other, matchTags=True, matchConstraints=True): + """Examine |ASN.1| type for equality with other ASN.1 type. + + ASN.1 tags (:py:mod:`~pyasn1.type.tag`) and constraints + (:py:mod:`~pyasn1.type.constraint`) are examined when carrying + out ASN.1 types comparison. + + No Python inheritance relationship between PyASN1 objects is considered. + + Parameters + ---------- + other: a pyasn1 type object + Class instance representing ASN.1 type. + + Returns + ------- + : :class:`bool` + :class:`True` if *other* is |ASN.1| type, + :class:`False` otherwise. 
+ """ return self is other or \ - self._tagSet == other.getTagSet() and \ - self._subtypeSpec == other.getSubtypeSpec() - def isSuperTypeOf(self, other): - """Returns true if argument is a ASN1 subtype of ourselves""" - return self._tagSet.isSuperTagSetOf(other.getTagSet()) and \ - self._subtypeSpec.isSuperTypeOf(other.getSubtypeSpec()) - -class __NoValue: + (not matchTags or + self._tagSet == other.tagSet) and \ + (not matchConstraints or + self._subtypeSpec == other.subtypeSpec) + + def isSuperTypeOf(self, other, matchTags=True, matchConstraints=True): + """Examine |ASN.1| type for subtype relationship with other ASN.1 type. + + ASN.1 tags (:py:mod:`~pyasn1.type.tag`) and constraints + (:py:mod:`~pyasn1.type.constraint`) are examined when carrying + out ASN.1 types comparison. + + No Python inheritance relationship between PyASN1 objects is considered. + + + Parameters + ---------- + other: a pyasn1 type object + Class instance representing ASN.1 type. + + Returns + ------- + : :class:`bool` + :class:`True` if *other* is a subtype of |ASN.1| type, + :class:`False` otherwise. + """ + return (not matchTags or + self._tagSet.isSuperTagSetOf(other.tagSet)) and \ + (not matchConstraints or + (self._subtypeSpec.isSuperTypeOf(other.subtypeSpec))) + + @staticmethod + def isNoValue(*values): + for value in values: + if value is not None and value is not noValue: + return False + return True + + # backward compatibility + + def getTagSet(self): + return self.tagSet + + def getEffectiveTagSet(self): + return self.effectiveTagSet + + def getTagMap(self): + return self.tagMap + + def getSubtypeSpec(self): + return self.subtypeSpec + + +class NoValue(object): + """Create a singleton instance of NoValue class. + + NoValue object can be used as an initializer on PyASN1 type class + instantiation to represent ASN.1 type rather than ASN.1 data value. + + No operations other than type comparison can be performed on + a PyASN1 type object. 
+ """ + skipMethods = ('__getattribute__', '__getattr__', '__setattr__', '__delattr__', + '__class__', '__init__', '__del__', '__new__', '__repr__', + '__qualname__', '__objclass__', 'im_class', '__sizeof__') + + _instance = None + + def __new__(cls): + if cls._instance is None: + def getPlug(name): + def plug(self, *args, **kw): + raise error.PyAsn1Error('Uninitialized ASN.1 value ("%s" attribute looked up)' % name) + return plug + + op_names = [name + for typ in (str, int, list, dict) + for name in dir(typ) + if name not in cls.skipMethods and name.startswith('__') and name.endswith('__') and callable(getattr(typ, name))] + + for name in set(op_names): + setattr(cls, name, getPlug(name)) + + cls._instance = object.__new__(cls) + + return cls._instance + def __getattr__(self, attr): - raise error.PyAsn1Error('No value for %s()' % attr) - def __getitem__(self, i): - raise error.PyAsn1Error('No value') - -noValue = __NoValue() + if attr in self.skipMethods: + raise AttributeError('attribute %s not present' % attr) + raise error.PyAsn1Error('No value for "%s"' % attr) + + def __repr__(self): + return '%s()' % self.__class__.__name__ + +noValue = NoValue() + # Base class for "simple" ASN.1 objects. These are immutable. 
-class AbstractSimpleAsn1Item(Asn1ItemBase): +class AbstractSimpleAsn1Item(Asn1ItemBase): + #: Default payload value defaultValue = noValue - def __init__(self, value=None, tagSet=None, subtypeSpec=None): + + def __init__(self, value=noValue, tagSet=None, subtypeSpec=None): Asn1ItemBase.__init__(self, tagSet, subtypeSpec) if value is None or value is noValue: value = self.defaultValue - if value is None or value is noValue: - self.__hashedValue = value = noValue else: value = self.prettyIn(value) - self._verifySubtypeSpec(value) - self.__hashedValue = hash(value) + try: + self._subtypeSpec(value) + + except error.PyAsn1Error: + exType, exValue, exTb = sys.exc_info() + raise exType('%s at %s' % (exValue, self.__class__.__name__)) + + self.__hashedValue = None self._value = value self._len = None - + def __repr__(self): - if self._value is noValue: - return self.__class__.__name__ + '()' - else: - return self.__class__.__name__ + '(%s)' % (self.prettyOut(self._value),) - def __str__(self): return str(self._value) + representation = [] + if self._value is not self.defaultValue: + representation.append(self.prettyOut(self._value)) + if self._tagSet is not self.__class__.tagSet: + representation.append('tagSet=%r' % (self._tagSet,)) + if self._subtypeSpec is not self.subtypeSpec: + representation.append('subtypeSpec=%r' % (self._subtypeSpec,)) + return '%s(%s)' % (self.__class__.__name__, ', '.join(representation)) + + def __str__(self): + return str(self._value) + def __eq__(self, other): return self is other and True or self._value == other - def __ne__(self, other): return self._value != other - def __lt__(self, other): return self._value < other - def __le__(self, other): return self._value <= other - def __gt__(self, other): return self._value > other - def __ge__(self, other): return self._value >= other + + def __ne__(self, other): + return self._value != other + + def __lt__(self, other): + return self._value < other + + def __le__(self, other): + return 
self._value <= other + + def __gt__(self, other): + return self._value > other + + def __ge__(self, other): + return self._value >= other + if sys.version_info[0] <= 2: - def __nonzero__(self): return bool(self._value) + def __nonzero__(self): + return self._value and True or False else: - def __bool__(self): return bool(self._value) - def __hash__(self): return self.__hashedValue + def __bool__(self): + return self._value and True or False - def clone(self, value=None, tagSet=None, subtypeSpec=None): - if value is None and tagSet is None and subtypeSpec is None: - return self - if value is None: + def __hash__(self): + if self.__hashedValue is None: + self.__hashedValue = hash(self._value) + return self.__hashedValue + + @property + def isValue(self): + """Indicate if |ASN.1| object represents ASN.1 type or ASN.1 value. + + The PyASN1 type objects can only participate in types comparison + and serve as a blueprint for serialization codecs to resolve + ambiguous types. + + The PyASN1 value objects can additionally participate in most + of built-in Python operations. + + Returns + ------- + : :class:`bool` + :class:`True` if object represents ASN.1 value and type, + :class:`False` if object represents just ASN.1 type. + + """ + return self._value is not noValue + + def clone(self, value=noValue, tagSet=None, subtypeSpec=None): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *clone()* method will replace corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`tuple`, :class:`str` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. 
+ + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing ASN.1 tag(s) to use in new object instead of inheriting from the caller + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing ASN.1 subtype constraint(s) to use in new object instead of inheriting from the caller + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + isModified = False + + if value is None or value is noValue: value = self._value - if tagSet is None: + else: + isModified = True + if tagSet is None or tagSet is noValue: tagSet = self._tagSet - if subtypeSpec is None: + else: + isModified = True + if subtypeSpec is None or subtypeSpec is noValue: subtypeSpec = self._subtypeSpec - return self.__class__(value, tagSet, subtypeSpec) + else: + isModified = True + + if isModified: + return self.__class__(value, tagSet, subtypeSpec) + else: + return self - def subtype(self, value=None, implicitTag=None, explicitTag=None, + def subtype(self, value=noValue, implicitTag=None, explicitTag=None, subtypeSpec=None): - if value is None: + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *subtype()* method will be added to the corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`tuple`, :class:`str` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + implicitTag: :py:class:`~pyasn1.type.tag.Tag` + Implicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + explicitTag: :py:class:`~pyasn1.type.tag.Tag` + Explicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). 
+ + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Add ASN.1 constraints object to one of the caller, then + use the result as new object's ASN.1 constraints. + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + isModified = False + + if value is None or value is noValue: value = self._value - if implicitTag is not None: + else: + isModified = True + if implicitTag is not None and implicitTag is not noValue: tagSet = self._tagSet.tagImplicitly(implicitTag) - elif explicitTag is not None: + isModified = True + elif explicitTag is not None and explicitTag is not noValue: tagSet = self._tagSet.tagExplicitly(explicitTag) + isModified = True else: tagSet = self._tagSet - if subtypeSpec is None: + if subtypeSpec is None or subtypeSpec is noValue: subtypeSpec = self._subtypeSpec else: - subtypeSpec = subtypeSpec + self._subtypeSpec - return self.__class__(value, tagSet, subtypeSpec) + subtypeSpec = self._subtypeSpec + subtypeSpec + isModified = True + + if isModified: + return self.__class__(value, tagSet, subtypeSpec) + else: + return self - def prettyIn(self, value): return value - def prettyOut(self, value): return str(value) + def prettyIn(self, value): + return value + + def prettyOut(self, value): + return str(value) def prettyPrint(self, scope=0): - if self._value is noValue: - return '' - else: + """Provide human-friendly printable object representation. + + Returns + ------- + : :class:`str` + human-friendly type and/or value representation. 
+ """ + if self.isValue: return self.prettyOut(self._value) + else: + return '' # XXX Compatibility stub - def prettyPrinter(self, scope=0): return self.prettyPrint(scope) - + def prettyPrinter(self, scope=0): + return self.prettyPrint(scope) + + # noinspection PyUnusedLocal + def prettyPrintType(self, scope=0): + return '%s -> %s' % (self.tagSet, self.__class__.__name__) + + # backward compatibility + + def hasValue(self): + return self.isValue + + # # Constructed types: # * There are five of them: Sequence, SequenceOf/SetOf, Set and Choice @@ -148,9 +416,29 @@ def prettyPrinter(self, scope=0): return self.prettyPrint(scope) # of types for Sequence/Set/Choice. # +def setupComponent(): + """Returns a sentinel value. + + Indicates to a constructed type to set up its inner component so that it + can be referred to. This is useful in situation when you want to populate + descendants of a constructed type what requires being able to refer to + their parent types along the way. + + Example + ------- + + >>> constructed['record'] = setupComponent() + >>> constructed['record']['scalar'] = 42 + """ + return noValue + + class AbstractConstructedAsn1Item(Asn1ItemBase): - componentType = None - sizeSpec = constraint.ConstraintsIntersection() + + #: If `True`, requires exact component type matching, + #: otherwise subtype relation is only enforced + strictConstraints = False + def __init__(self, componentType=None, tagSet=None, subtypeSpec=None, sizeSpec=None): Asn1ItemBase.__init__(self, tagSet, subtypeSpec) @@ -163,87 +451,167 @@ def __init__(self, componentType=None, tagSet=None, else: self._sizeSpec = sizeSpec self._componentValues = [] - self._componentValuesSet = 0 def __repr__(self): - r = self.__class__.__name__ + '()' - for idx in range(len(self._componentValues)): - if self._componentValues[idx] is None: - continue - r = r + '.setComponentByPosition(%s, %r)' % ( - idx, self._componentValues[idx] - ) - return r + representation = [] + if self._componentType is not 
self.componentType: + representation.append('componentType=%r' % (self._componentType,)) + if self._tagSet is not self.__class__.tagSet: + representation.append('tagSet=%r' % (self._tagSet,)) + if self._subtypeSpec is not self.subtypeSpec: + representation.append('subtypeSpec=%r' % (self._subtypeSpec,)) + representation = '%s(%s)' % (self.__class__.__name__, ', '.join(representation)) + if self._componentValues: + for idx, component in enumerate(self._componentValues): + if component is None or component is noValue: + continue + representation += '.setComponentByPosition(%d, %s)' % (idx, repr(component)) + return representation def __eq__(self, other): return self is other and True or self._componentValues == other - def __ne__(self, other): return self._componentValues != other - def __lt__(self, other): return self._componentValues < other - def __le__(self, other): return self._componentValues <= other - def __gt__(self, other): return self._componentValues > other - def __ge__(self, other): return self._componentValues >= other + + def __ne__(self, other): + return self._componentValues != other + + def __lt__(self, other): + return self._componentValues < other + + def __le__(self, other): + return self._componentValues <= other + + def __gt__(self, other): + return self._componentValues > other + + def __ge__(self, other): + return self._componentValues >= other + if sys.version_info[0] <= 2: - def __nonzero__(self): return bool(self._componentValues) + def __nonzero__(self): + return self._componentValues and True or False else: - def __bool__(self): return bool(self._componentValues) + def __bool__(self): + return self._componentValues and True or False - def getComponentTagMap(self): - raise error.PyAsn1Error('Method not implemented') + def _cloneComponentValues(self, myClone, cloneValueFlag): + pass + + def clone(self, tagSet=None, subtypeSpec=None, sizeSpec=None, cloneValueFlag=None): + """Create a copy of a |ASN.1| type or object. 
+ + Any parameters to the *clone()* method will replace corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + sizeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 size constraint(s) - def _cloneComponentValues(self, myClone, cloneValueFlag): pass + Returns + ------- + : + new instance of |ASN.1| type/value - def clone(self, tagSet=None, subtypeSpec=None, sizeSpec=None, - cloneValueFlag=None): + """ if tagSet is None: tagSet = self._tagSet if subtypeSpec is None: subtypeSpec = self._subtypeSpec if sizeSpec is None: sizeSpec = self._sizeSpec - r = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec) + clone = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec) if cloneValueFlag: - self._cloneComponentValues(r, cloneValueFlag) - return r + self._cloneComponentValues(clone, cloneValueFlag) + return clone def subtype(self, implicitTag=None, explicitTag=None, subtypeSpec=None, sizeSpec=None, cloneValueFlag=None): - if implicitTag is not None: + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *subtype()* method will be added to the corresponding + properties of the |ASN.1| object. 
+ + Parameters + ---------- + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + sizeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 size constraint(s) + + Returns + ------- + : + new instance of |ASN.1| type/value + + """ + if implicitTag is not None and implicitTag is not noValue: tagSet = self._tagSet.tagImplicitly(implicitTag) - elif explicitTag is not None: + elif explicitTag is not None and explicitTag is not noValue: tagSet = self._tagSet.tagExplicitly(explicitTag) else: tagSet = self._tagSet - if subtypeSpec is None: + if subtypeSpec is None or subtypeSpec is noValue: subtypeSpec = self._subtypeSpec else: - subtypeSpec = subtypeSpec + self._subtypeSpec - if sizeSpec is None: + subtypeSpec = self._subtypeSpec + subtypeSpec + if sizeSpec is None or sizeSpec is noValue: sizeSpec = self._sizeSpec else: - sizeSpec = sizeSpec + self._sizeSpec - r = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec) + sizeSpec += self._sizeSpec + clone = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec) if cloneValueFlag: - self._cloneComponentValues(r, cloneValueFlag) - return r - - def _verifyComponent(self, idx, value): pass + self._cloneComponentValues(clone, cloneValueFlag) + return clone - def verifySizeSpec(self): self._sizeSpec(self) + def verifySizeSpec(self): + self._sizeSpec(self) def getComponentByPosition(self, idx): raise error.PyAsn1Error('Method not implemented') + def setComponentByPosition(self, idx, value, verifyConstraints=True): raise error.PyAsn1Error('Method not implemented') - def getComponentType(self): return self._componentType + def setComponents(self, *args, **kwargs): + for idx, value in enumerate(args): + self[idx] = value + for k in kwargs: + self[k] = kwargs[k] + return self + + 
def getComponentType(self): + return self._componentType + + # backward compatibility -- no-op + def setDefaultComponents(self): + pass + + @property + def componentTagMap(self): + raise error.PyAsn1Error('Method not implemented') + + def __getitem__(self, idx): + return self.getComponentByPosition(idx) - def __getitem__(self, idx): return self.getComponentByPosition(idx) - def __setitem__(self, idx, value): self.setComponentByPosition(idx, value) + def __setitem__(self, idx, value): + self.setComponentByPosition(idx, value) + + def __len__(self): + return len(self._componentValues) - def __len__(self): return len(self._componentValues) - def clear(self): self._componentValues = [] - self._componentValuesSet = 0 - def setDefaultComponents(self): pass + # backward compatibility + def getComponentTagMap(self): + return self.componentTagMap \ No newline at end of file diff --git a/src/lib/pyasn1/type/char.py b/src/lib/pyasn1/type/char.py index ae112f8bd..039e53660 100644 --- a/src/lib/pyasn1/type/char.py +++ b/src/lib/pyasn1/type/char.py @@ -1,61 +1,378 @@ -# ASN.1 "character string" types +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys from pyasn1.type import univ, tag +from pyasn1 import error -class UTF8String(univ.OctetString): - tagSet = univ.OctetString.tagSet.tagImplicitly( - tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12) - ) - encoding = "utf-8" -class NumericString(univ.OctetString): - tagSet = univ.OctetString.tagSet.tagImplicitly( +__all__ = ['NumericString', 'PrintableString', 'TeletexString', 'T61String', 'VideotexString', + 'IA5String', 'GraphicString', 'VisibleString', 'ISO646String', + 'GeneralString', 'UniversalString', 'BMPString', 'UTF8String'] + +NoValue = univ.NoValue +noValue = univ.noValue + + +class AbstractCharacterString(univ.OctetString): + """Creates |ASN.1| type or object. 
+ + |ASN.1| objects are immutable and duck-type Python 2 :class:`unicode` or Python 3 :class:`str`. + When used in octet-stream context, |ASN.1| type assumes "|encoding|" encoding. + + Parameters + ---------- + value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object + unicode object (Python 2) or string (Python 3), alternatively string + (Python 2) or bytes (Python 3) representing octet-stream of serialized + unicode string (note `encoding` parameter) or |ASN.1| class instance. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + encoding: :py:class:`str` + Unicode codec ID to encode/decode :class:`unicode` (Python 2) or + :class:`str` (Python 3) the payload when |ASN.1| object is used + in octet-stream context. + + Raises + ------ + : :py:class:`pyasn1.error.PyAsn1Error` + On constraint violation or bad initializer. 
+ """ + + if sys.version_info[0] <= 2: + def __str__(self): + try: + return self._value.encode(self._encoding) + except UnicodeEncodeError: + raise error.PyAsn1Error( + 'Can\'t encode string \'%s\' with \'%s\' codec' % (self._value, self._encoding) + ) + + def __unicode__(self): + return unicode(self._value) + + def prettyIn(self, value): + if isinstance(value, unicode): + return value + elif isinstance(value, str): + try: + return value.decode(self._encoding) + except (LookupError, UnicodeDecodeError): + raise error.PyAsn1Error( + 'Can\'t decode string \'%s\' with \'%s\' codec' % (value, self._encoding) + ) + elif isinstance(value, (tuple, list)): + try: + return self.prettyIn(''.join([chr(x) for x in value])) + except ValueError: + raise error.PyAsn1Error( + 'Bad %s initializer \'%s\'' % (self.__class__.__name__, value) + ) + else: + try: + return unicode(value) + except UnicodeDecodeError: + raise error.PyAsn1Error( + 'Can\'t turn object \'%s\' into unicode' % (value,) + ) + + def asOctets(self, padding=True): + return str(self) + + def asNumbers(self, padding=True): + return tuple([ord(x) for x in str(self)]) + + else: + def __str__(self): + return str(self._value) + + def __bytes__(self): + try: + return self._value.encode(self._encoding) + except UnicodeEncodeError: + raise error.PyAsn1Error( + 'Can\'t encode string \'%s\' with \'%s\' codec' % (self._value, self._encoding) + ) + + def prettyIn(self, value): + if isinstance(value, str): + return value + elif isinstance(value, bytes): + try: + return value.decode(self._encoding) + except UnicodeDecodeError: + raise error.PyAsn1Error( + 'Can\'t decode string \'%s\' with \'%s\' codec' % (value, self._encoding) + ) + elif isinstance(value, (tuple, list)): + return self.prettyIn(bytes(value)) + else: + try: + return str(value) + except (UnicodeDecodeError, ValueError): + raise error.PyAsn1Error( + 'Can\'t turn object \'%s\' into unicode' % (value,) + ) + + def asOctets(self, padding=True): + return bytes(self) + + 
def asNumbers(self, padding=True): + return tuple(bytes(self)) + + def prettyOut(self, value): + return value + + def __reversed__(self): + return reversed(self._value) + + def clone(self, value=noValue, tagSet=None, subtypeSpec=None, + encoding=None, binValue=noValue, hexValue=noValue): + """Creates a copy of a |ASN.1| type or object. + + Any parameters to the *clone()* method will replace corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object + unicode object (Python 2) or string (Python 3), alternatively string + (Python 2) or bytes (Python 3) representing octet-stream of serialized + unicode string (note `encoding` parameter) or |ASN.1| class instance. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + encoding: :py:class:`str` + Unicode codec ID to encode/decode :py:class:`unicode` (Python 2) or + :py:class:`str` (Python 3) the payload when |ASN.1| object is used + in octet-stream context. + + Returns + ------- + : + new instance of |ASN.1| type/value + + """ + return univ.OctetString.clone(self, value, tagSet, subtypeSpec, encoding, binValue, hexValue) + + def subtype(self, value=noValue, implicitTag=None, explicitTag=None, + subtypeSpec=None, encoding=None, binValue=noValue, hexValue=noValue): + """Creates a copy of a |ASN.1| type or object. + + Any parameters to the *subtype()* method will be added to the corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object + unicode object (Python 2) or string (Python 3), alternatively string + (Python 2) or bytes (Python 3) representing octet-stream of serialized + unicode string (note `encoding` parameter) or |ASN.1| class instance. 
+ + implicitTag: :py:class:`~pyasn1.type.tag.Tag` + Implicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + explicitTag: :py:class:`~pyasn1.type.tag.Tag` + Explicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + encoding: :py:class:`str` + Unicode codec ID to encode/decode :py:class:`unicode` (Python 2) or + :py:class:`str` (Python 3) the payload when |ASN.1| object is used + in octet-stream context. + + Returns + ------- + : + new instance of |ASN.1| type/value + + """ + return univ.OctetString.subtype(self, value, implicitTag, explicitTag, subtypeSpec, encoding, binValue, hexValue) + + +class NumericString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = AbstractCharacterString.tagSet.tagImplicitly( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 18) - ) + ) + encoding = 'us-ascii' + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + -class PrintableString(univ.OctetString): - tagSet = univ.OctetString.tagSet.tagImplicitly( +class PrintableString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. 
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 19) - ) + ) + encoding = 'us-ascii' + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + -class TeletexString(univ.OctetString): - tagSet = univ.OctetString.tagSet.tagImplicitly( +class TeletexString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = AbstractCharacterString.tagSet.tagImplicitly( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 20) - ) - + ) + encoding = 'iso-8859-1' + -class VideotexString(univ.OctetString): - tagSet = univ.OctetString.tagSet.tagImplicitly( +class T61String(TeletexString): + __doc__ = TeletexString.__doc__ + + +class VideotexString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = AbstractCharacterString.tagSet.tagImplicitly( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 21) - ) + ) + encoding = 'iso-8859-1' + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + -class IA5String(univ.OctetString): - tagSet = univ.OctetString.tagSet.tagImplicitly( +class IA5String(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. 
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 22) - ) + ) + encoding = 'us-ascii' + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + -class GraphicString(univ.OctetString): - tagSet = univ.OctetString.tagSet.tagImplicitly( +class GraphicString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = AbstractCharacterString.tagSet.tagImplicitly( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 25) - ) + ) + encoding = 'iso-8859-1' + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() -class VisibleString(univ.OctetString): - tagSet = univ.OctetString.tagSet.tagImplicitly( + +class VisibleString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = AbstractCharacterString.tagSet.tagImplicitly( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 26) - ) + ) + encoding = 'us-ascii' + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + + +class ISO646String(VisibleString): + __doc__ = VisibleString.__doc__ -class GeneralString(univ.OctetString): - tagSet = univ.OctetString.tagSet.tagImplicitly( + +class GeneralString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. 
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 27) - ) + ) + encoding = 'iso-8859-1' + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + + +class UniversalString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ -class UniversalString(univ.OctetString): - tagSet = univ.OctetString.tagSet.tagImplicitly( + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = AbstractCharacterString.tagSet.tagImplicitly( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 28) - ) + ) encoding = "utf-32-be" -class BMPString(univ.OctetString): - tagSet = univ.OctetString.tagSet.tagImplicitly( + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + + +class BMPString(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = AbstractCharacterString.tagSet.tagImplicitly( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 30) - ) + ) encoding = "utf-16-be" + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() + + +class UTF8String(AbstractCharacterString): + __doc__ = AbstractCharacterString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. 
+ tagSet = AbstractCharacterString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12) + ) + encoding = "utf-8" + + # Optimization for faster codec lookup + typeId = AbstractCharacterString.getTypeId() diff --git a/src/lib/pyasn1/type/constraint.py b/src/lib/pyasn1/type/constraint.py index 66873937d..7f96c507d 100644 --- a/src/lib/pyasn1/type/constraint.py +++ b/src/lib/pyasn1/type/constraint.py @@ -1,86 +1,149 @@ # -# ASN.1 subtype constraints classes. +# This file is part of pyasn1 software. # -# Constraints are relatively rare, but every ASN1 object -# is doing checks all the time for whether they have any -# constraints and whether they are applicable to the object. +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html # -# What we're going to do is define objects/functions that -# can be called unconditionally if they are present, and that -# are simply not present if there are no constraints. -# -# Original concept and code by Mike C. Fletcher. +# Original concept and code by Mike C. Fletcher. # import sys from pyasn1.type import error -class AbstractConstraint: +__all__ = ['SingleValueConstraint', 'ContainedSubtypeConstraint', 'ValueRangeConstraint', + 'ValueSizeConstraint', 'PermittedAlphabetConstraint', 'InnerTypeConstraint', + 'ConstraintsExclusion', 'ConstraintsIntersection', 'ConstraintsUnion'] + + +class AbstractConstraint(object): """Abstract base-class for constraint objects Constraints should be stored in a simple sequence in the - namespace of their client Asn1Item sub-classes. + namespace of their client Asn1Item sub-classes in cases + when ASN.1 constraint is define. 
""" + def __init__(self, *values): - self._valueMap = {} + self._valueMap = set() self._setValues(values) self.__hashedValues = None + def __call__(self, value, idx=None): + if not self._values: + return + try: self._testValue(value, idx) + except error.ValueConstraintError: raise error.ValueConstraintError( - '%s failed at: \"%s\"' % (self, sys.exc_info()[1]) + '%s failed at: %r' % (self, sys.exc_info()[1]) ) + def __repr__(self): return '%s(%s)' % ( self.__class__.__name__, ', '.join([repr(x) for x in self._values]) ) + def __eq__(self, other): return self is other and True or self._values == other - def __ne__(self, other): return self._values != other - def __lt__(self, other): return self._values < other - def __le__(self, other): return self._values <= other - def __gt__(self, other): return self._values > other - def __ge__(self, other): return self._values >= other + + def __ne__(self, other): + return self._values != other + + def __lt__(self, other): + return self._values < other + + def __le__(self, other): + return self._values <= other + + def __gt__(self, other): + return self._values > other + + def __ge__(self, other): + return self._values >= other + if sys.version_info[0] <= 2: - def __nonzero__(self): return bool(self._values) + def __nonzero__(self): + return self._values and True or False else: - def __bool__(self): return bool(self._values) + def __bool__(self): + return self._values and True or False def __hash__(self): if self.__hashedValues is None: self.__hashedValues = hash((self.__class__.__name__, self._values)) return self.__hashedValues - def _setValues(self, values): self._values = values + # descriptor protocol + + def __get__(self, instance, owner): + if instance is None: + return self + + # This is a bit of hack: look up instance attribute first, + # then try class attribute if instance attribute with that + # name is not available. 
+ # The rationale is to have `.subtypeSpec`/`.sizeSpec` readable-writeable + # as a class attribute and read-only as instance attribute. + try: + return instance._subtypeSpec + + except AttributeError: + try: + return instance._sizeSpec + + except AttributeError: + return self + + def __set__(self, instance, value): + raise AttributeError('attribute is read-only') + + def _setValues(self, values): + self._values = values + def _testValue(self, value, idx): raise error.ValueConstraintError(value) # Constraints derivation logic - def getValueMap(self): return self._valueMap + def getValueMap(self): + return self._valueMap + def isSuperTypeOf(self, otherConstraint): - return self in otherConstraint.getValueMap() or \ - otherConstraint is self or otherConstraint == self + return (otherConstraint is self or + not self._values or + otherConstraint == self or + self in otherConstraint.getValueMap()) + def isSubTypeOf(self, otherConstraint): - return otherConstraint in self._valueMap or \ - otherConstraint is self or otherConstraint == self + return (otherConstraint is self or + not self or + otherConstraint == self or + otherConstraint in self._valueMap) class SingleValueConstraint(AbstractConstraint): """Value must be part of defined values constraint""" + + def _setValues(self, values): + self._values = values + self._set = set(values) + def _testValue(self, value, idx): - # XXX index vals for performance? 
- if value not in self._values: + if value not in self._set: raise error.ValueConstraintError(value) + class ContainedSubtypeConstraint(AbstractConstraint): """Value must satisfy all of defined set of constraints""" + def _testValue(self, value, idx): for c in self._values: c(value, idx) + class ValueRangeConstraint(AbstractConstraint): """Value must be within start and stop values (inclusive)""" + def _testValue(self, value, idx): if value < self.start or value > self.stop: raise error.ValueConstraintError(value) @@ -89,7 +152,7 @@ def _setValues(self, values): if len(values) != 2: raise error.PyAsn1Error( '%s: bad constraint values' % (self.__class__.__name__,) - ) + ) self.start, self.stop = values if self.start > self.stop: raise error.PyAsn1Error( @@ -99,28 +162,31 @@ def _setValues(self, values): ) ) AbstractConstraint._setValues(self, values) - + + class ValueSizeConstraint(ValueRangeConstraint): """len(value) must be within start and stop values (inclusive)""" + def _testValue(self, value, idx): - l = len(value) - if l < self.start or l > self.stop: + valueSize = len(value) + if valueSize < self.start or valueSize > self.stop: raise error.ValueConstraintError(value) + class PermittedAlphabetConstraint(SingleValueConstraint): def _setValues(self, values): - self._values = () - for v in values: - self._values = self._values + tuple(v) + self._values = values + self._set = set(values) def _testValue(self, value, idx): - for v in value: - if v not in self._values: - raise error.ValueConstraintError(value) + if not self._set.issuperset(value): + raise error.ValueConstraintError(value) + -# This is a bit kludgy, meaning two op modes within a single constraing +# This is a bit kludgy, meaning two op modes within a single constraint class InnerTypeConstraint(AbstractConstraint): """Value must satisfy type and presense constraints""" + def _testValue(self, value, idx): if self.__singleTypeConstraint: self.__singleTypeConstraint(value) @@ -128,7 +194,7 @@ def 
_testValue(self, value, idx): if idx not in self.__multipleTypeConstraint: raise error.ValueConstraintError(value) constraint, status = self.__multipleTypeConstraint[idx] - if status == 'ABSENT': # XXX presense is not checked! + if status == 'ABSENT': # XXX presense is not checked! raise error.ValueConstraintError(value) constraint(value) @@ -142,10 +208,12 @@ def _setValues(self, values): self.__singleTypeConstraint = v AbstractConstraint._setValues(self, values) -# Boolean ops on constraints + +# Boolean ops on constraints class ConstraintsExclusion(AbstractConstraint): """Value must not fit the single constraint""" + def _testValue(self, value, idx): try: self._values[0](value, idx) @@ -159,42 +227,57 @@ def _setValues(self, values): raise error.PyAsn1Error('Single constraint expected') AbstractConstraint._setValues(self, values) + class AbstractConstraintSet(AbstractConstraint): """Value must not satisfy the single constraint""" - def __getitem__(self, idx): return self._values[idx] - def __add__(self, value): return self.__class__(self, value) - def __radd__(self, value): return self.__class__(self, value) + def __getitem__(self, idx): + return self._values[idx] - def __len__(self): return len(self._values) + def __iter__(self): + return iter(self._values) + + def __add__(self, value): + return self.__class__(*(self._values + (value,))) + + def __radd__(self, value): + return self.__class__(*((value,) + self._values)) + + def __len__(self): + return len(self._values) # Constraints inclusion in sets - + def _setValues(self, values): self._values = values - for v in values: - self._valueMap[v] = 1 - self._valueMap.update(v.getValueMap()) + for constraint in values: + if constraint: + self._valueMap.add(constraint) + self._valueMap.update(constraint.getValueMap()) + class ConstraintsIntersection(AbstractConstraintSet): """Value must satisfy all constraints""" + def _testValue(self, value, idx): - for v in self._values: - v(value, idx) + for constraint in 
self._values: + constraint(value, idx) + class ConstraintsUnion(AbstractConstraintSet): """Value must satisfy at least one constraint""" + def _testValue(self, value, idx): - for v in self._values: + for constraint in self._values: try: - v(value, idx) + constraint(value, idx) except error.ValueConstraintError: pass else: return raise error.ValueConstraintError( 'all of %s failed for \"%s\"' % (self._values, value) - ) + ) # XXX # add tests for type check diff --git a/src/lib/pyasn1/type/error.py b/src/lib/pyasn1/type/error.py index 3e6848447..cbfa276a8 100644 --- a/src/lib/pyasn1/type/error.py +++ b/src/lib/pyasn1/type/error.py @@ -1,3 +1,11 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# from pyasn1.error import PyAsn1Error -class ValueConstraintError(PyAsn1Error): pass + +class ValueConstraintError(PyAsn1Error): + pass diff --git a/src/lib/pyasn1/type/namedtype.py b/src/lib/pyasn1/type/namedtype.py index 48967a5fe..3f9ae1904 100644 --- a/src/lib/pyasn1/type/namedtype.py +++ b/src/lib/pyasn1/type/namedtype.py @@ -1,132 +1,475 @@ -# NamedType specification for constructed types +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# import sys from pyasn1.type import tagmap from pyasn1 import error -class NamedType: - isOptional = 0 - isDefaulted = 0 - def __init__(self, name, t): - self.__name = name; self.__type = t - def __repr__(self): return '%s(%s, %s)' % ( - self.__class__.__name__, self.__name, self.__type - ) - def getType(self): return self.__type - def getName(self): return self.__name +__all__ = ['NamedType', 'OptionalNamedType', 'DefaultedNamedType', 'NamedTypes'] + + +class NamedType(object): + """Create named field object for a constructed ASN.1 type. + + The |NamedType| object represents a single name and ASN.1 type of a constructed ASN.1 type. 
+ + |NamedType| objects are immutable and duck-type Python :class:`tuple` objects + holding *name* and *asn1Object* components. + + Parameters + ---------- + name: :py:class:`str` + Field name + + asn1Object: + ASN.1 type object + """ + isOptional = False + isDefaulted = False + + def __init__(self, name, asn1Object): + self.__name = name + self.__type = asn1Object + self.__nameAndType = name, asn1Object + + def __repr__(self): + return '%s(%r, %r)' % (self.__class__.__name__, self.__name, self.__type) + + def __eq__(self, other): + return self.__nameAndType == other + + def __ne__(self, other): + return self.__nameAndType != other + + def __lt__(self, other): + return self.__nameAndType < other + + def __le__(self, other): + return self.__nameAndType <= other + + def __gt__(self, other): + return self.__nameAndType > other + + def __ge__(self, other): + return self.__nameAndType >= other + + def __hash__(self): + return hash(self.__nameAndType) + def __getitem__(self, idx): - if idx == 0: return self.__name - if idx == 1: return self.__type - raise IndexError() + return self.__nameAndType[idx] + + def __iter__(self): + return iter(self.__nameAndType) + + @property + def name(self): + return self.__name + @property + def asn1Object(self): + return self.__type + + # Backward compatibility + + def getName(self): + return self.name + + def getType(self): + return self.asn1Object + + class OptionalNamedType(NamedType): - isOptional = 1 + __doc__ = NamedType.__doc__ + + isOptional = True + + class DefaultedNamedType(NamedType): - isDefaulted = 1 - -class NamedTypes: + __doc__ = NamedType.__doc__ + + isDefaulted = True + + +class NamedTypes(object): + """Create a collection of named fields for a constructed ASN.1 type. + + The NamedTypes object represents a collection of named fields of a constructed ASN.1 type. + + *NamedTypes* objects are immutable and duck-type Python :class:`dict` objects + holding *name* as keys and ASN.1 type object as values. 
+ + Parameters + ---------- + *namedTypes: :class:`~pyasn1.type.namedtype.NamedType` + """ def __init__(self, *namedTypes): self.__namedTypes = namedTypes self.__namedTypesLen = len(self.__namedTypes) self.__minTagSet = None - self.__tagToPosIdx = {}; self.__nameToPosIdx = {} - self.__tagMap = { False: None, True: None } - self.__ambigiousTypes = {} + self.__tagToPosMapImpl = None + self.__nameToPosMapImpl = None + self.__ambigiousTypesImpl = None + self.__tagMap = {} + self.__hasOptionalOrDefault = None + self.__requiredComponents = None def __repr__(self): - r = '%s(' % self.__class__.__name__ - for n in self.__namedTypes: - r = r + '%r, ' % (n,) - return r + ')' - - def __getitem__(self, idx): return self.__namedTypes[idx] + return '%s(%s)' % ( + self.__class__.__name__, ', '.join([repr(x) for x in self.__namedTypes]) + ) + + def __eq__(self, other): + return self.__namedTypes == other + + def __ne__(self, other): + return self.__namedTypes != other + + def __lt__(self, other): + return self.__namedTypes < other + + def __le__(self, other): + return self.__namedTypes <= other + + def __gt__(self, other): + return self.__namedTypes > other + + def __ge__(self, other): + return self.__namedTypes >= other + + def __hash__(self): + return hash(self.__namedTypes) + + def __getitem__(self, idx): + try: + return self.__namedTypes[idx] + + except TypeError: + return self.__namedTypes[self.__nameToPosMap[idx]] + + def __contains__(self, key): + return key in self.__nameToPosMap + + def __iter__(self): + return (x[0] for x in self.__namedTypes) if sys.version_info[0] <= 2: - def __nonzero__(self): return bool(self.__namedTypesLen) + def __nonzero__(self): + return self.__namedTypesLen > 0 else: - def __bool__(self): return bool(self.__namedTypesLen) - def __len__(self): return self.__namedTypesLen - + def __bool__(self): + return self.__namedTypesLen > 0 + + def __len__(self): + return self.__namedTypesLen + + # Python dict protocol + + def values(self): + return 
(namedType.asn1Object for namedType in self.__namedTypes) + + def keys(self): + return (namedType.name for namedType in self.__namedTypes) + + def items(self): + return ((namedType.name, namedType.asn1Object) for namedType in self.__namedTypes) + + def clone(self): + return self.__class__(*self.__namedTypes) + + @property + def __tagToPosMap(self): + if self.__tagToPosMapImpl is None: + self.__tagToPosMapImpl = {} + for idx, namedType in enumerate(self.__namedTypes): + tagMap = namedType.asn1Object.tagMap + if not tagMap: + continue + for _tagSet in tagMap.presentTypes: + if _tagSet in self.__tagToPosMapImpl: + raise error.PyAsn1Error('Duplicate type %s in %s' % (_tagSet, namedType)) + self.__tagToPosMapImpl[_tagSet] = idx + + return self.__tagToPosMapImpl + + @property + def __nameToPosMap(self): + if self.__nameToPosMapImpl is None: + self.__nameToPosMapImpl = {} + for idx, namedType in enumerate(self.__namedTypes): + if namedType.name in self.__nameToPosMapImpl: + raise error.PyAsn1Error('Duplicate name %s in %s' % (namedType.name, namedType)) + self.__nameToPosMapImpl[namedType.name] = idx + + return self.__nameToPosMapImpl + + @property + def __ambigiousTypes(self): + if self.__ambigiousTypesImpl is None: + self.__ambigiousTypesImpl = {} + ambigiousTypes = () + for idx, namedType in reversed(tuple(enumerate(self.__namedTypes))): + if namedType.isOptional or namedType.isDefaulted: + ambigiousTypes = (namedType,) + ambigiousTypes + else: + ambigiousTypes = (namedType,) + self.__ambigiousTypesImpl[idx] = NamedTypes(*ambigiousTypes) + return self.__ambigiousTypesImpl + def getTypeByPosition(self, idx): - if idx < 0 or idx >= self.__namedTypesLen: + """Return ASN.1 type object by its position in fields set. 
+ + Parameters + ---------- + idx: :py:class:`int` + Field index + + Returns + ------- + : + ASN.1 type + + Raises + ------ + : :class:`~pyasn1.error.PyAsn1Error` + If given position is out of fields range + """ + try: + return self.__namedTypes[idx].asn1Object + + except IndexError: raise error.PyAsn1Error('Type position out of range') - else: - return self.__namedTypes[idx].getType() def getPositionByType(self, tagSet): - if not self.__tagToPosIdx: - idx = self.__namedTypesLen - while idx > 0: - idx = idx - 1 - tagMap = self.__namedTypes[idx].getType().getTagMap() - for t in tagMap.getPosMap(): - if t in self.__tagToPosIdx: - raise error.PyAsn1Error('Duplicate type %s' % (t,)) - self.__tagToPosIdx[t] = idx + """Return field position by its ASN.1 type. + + Parameters + ---------- + tagSet: :class:`~pysnmp.type.tag.TagSet` + ASN.1 tag set distinguishing one ASN.1 type from others. + + Returns + ------- + : :py:class:`int` + ASN.1 type position in fields set + + Raises + ------ + : :class:`~pyasn1.error.PyAsn1Error` + If *tagSet* is not present or ASN.1 types are not unique within callee *NamedTypes* + """ try: - return self.__tagToPosIdx[tagSet] + return self.__tagToPosMap[tagSet] + except KeyError: raise error.PyAsn1Error('Type %s not found' % (tagSet,)) - + def getNameByPosition(self, idx): + """Return field name by its position in fields set. 
+ + Parameters + ---------- + idx: :py:class:`idx` + Field index + + Returns + ------- + : :py:class:`str` + Field name + + Raises + ------ + : :class:`~pyasn1.error.PyAsn1Error` + If given field name is not present in callee *NamedTypes* + """ try: - return self.__namedTypes[idx].getName() + return self.__namedTypes[idx].name + except IndexError: raise error.PyAsn1Error('Type position out of range') + def getPositionByName(self, name): - if not self.__nameToPosIdx: - idx = self.__namedTypesLen - while idx > 0: - idx = idx - 1 - n = self.__namedTypes[idx].getName() - if n in self.__nameToPosIdx: - raise error.PyAsn1Error('Duplicate name %s' % (n,)) - self.__nameToPosIdx[n] = idx + """Return field position by filed name. + + Parameters + ---------- + name: :py:class:`str` + Field name + + Returns + ------- + : :py:class:`int` + Field position in fields set + + Raises + ------ + : :class:`~pyasn1.error.PyAsn1Error` + If *name* is not present or not unique within callee *NamedTypes* + """ try: - return self.__nameToPosIdx[name] + return self.__nameToPosMap[name] + except KeyError: raise error.PyAsn1Error('Name %s not found' % (name,)) - def __buildAmbigiousTagMap(self): - ambigiousTypes = () - idx = self.__namedTypesLen - while idx > 0: - idx = idx - 1 - t = self.__namedTypes[idx] - if t.isOptional or t.isDefaulted: - ambigiousTypes = (t, ) + ambigiousTypes - else: - ambigiousTypes = (t, ) - self.__ambigiousTypes[idx] = NamedTypes(*ambigiousTypes) - def getTagMapNearPosition(self, idx): - if not self.__ambigiousTypes: self.__buildAmbigiousTagMap() + """Return ASN.1 types that are allowed at or past given field position. + + Some ASN.1 serialization allow for skipping optional and defaulted fields. + Some constructed ASN.1 types allow reordering of the fields. When recovering + such objects it may be important to know which types can possibly be + present at any given position in the field sets. 
+ + Parameters + ---------- + idx: :py:class:`int` + Field index + + Returns + ------- + : :class:`~pyasn1.type.tagmap.TagMap` + Map if ASN.1 types allowed at given field position + + Raises + ------ + : :class:`~pyasn1.error.PyAsn1Error` + If given position is out of fields range + """ try: return self.__ambigiousTypes[idx].getTagMap() + except KeyError: raise error.PyAsn1Error('Type position out of range') def getPositionNearType(self, tagSet, idx): - if not self.__ambigiousTypes: self.__buildAmbigiousTagMap() + """Return the closest field position where given ASN.1 type is allowed. + + Some ASN.1 serialization allow for skipping optional and defaulted fields. + Some constructed ASN.1 types allow reordering of the fields. When recovering + such objects it may be important to know at which field position, in field set, + given *tagSet* is allowed at or past *idx* position. + + Parameters + ---------- + tagSet: :class:`~pyasn1.type.tag.TagSet` + ASN.1 type which field position to look up + + idx: :py:class:`int` + Field position at or past which to perform ASN.1 type look up + + Returns + ------- + : :py:class:`int` + Field position in fields set + + Raises + ------ + : :class:`~pyasn1.error.PyAsn1Error` + If *tagSet* is not present or not unique within callee *NamedTypes* + or *idx* is out of fields range + """ try: - return idx+self.__ambigiousTypes[idx].getPositionByType(tagSet) + return idx + self.__ambigiousTypes[idx].getPositionByType(tagSet) + except KeyError: raise error.PyAsn1Error('Type position out of range') - def genMinTagSet(self): + @property + def minTagSet(self): + """Return the minimal TagSet among ASN.1 type in callee *NamedTypes*. + + Some ASN.1 types/serialization protocols require ASN.1 types to be + arranged based on their numerical tag value. The *minTagSet* property + returns that. 
+ + Returns + ------- + : :class:`~pyasn1.type.tagset.TagSet` + Minimal TagSet among ASN.1 types in callee *NamedTypes* + """ if self.__minTagSet is None: - for t in self.__namedTypes: - __type = t.getType() - tagSet = getattr(__type,'getMinTagSet',__type.getTagSet)() + for namedType in self.__namedTypes: + asn1Object = namedType.asn1Object + try: + tagSet = asn1Object.getMinTagSet() + + except AttributeError: + tagSet = asn1Object.tagSet if self.__minTagSet is None or tagSet < self.__minTagSet: self.__minTagSet = tagSet return self.__minTagSet - - def getTagMap(self, uniq=False): - if self.__tagMap[uniq] is None: - tagMap = tagmap.TagMap() - for nt in self.__namedTypes: - tagMap = tagMap.clone( - nt.getType(), nt.getType().getTagMap(), uniq - ) - self.__tagMap[uniq] = tagMap - return self.__tagMap[uniq] + + def getTagMap(self, unique=False): + """Create a *TagMap* object from tags and types recursively. + + Create a new :class:`~pyasn1.type.tagmap.TagMap` object by + combining tags from *TagMap* objects of children types and + associating them with their immediate child type. + + Example + ------- + + .. code-block:: python + + OuterType ::= CHOICE { + innerType INTEGER + } + + Calling *.getTagMap()* on *OuterType* will yield a map like this: + + .. code-block:: python + + Integer.tagSet -> Choice + + Parameters + ---------- + unique: :py:class:`bool` + If `True`, duplicate *TagSet* objects occurring while building + new *TagMap* would cause error. + + Returns + ------- + : :class:`~pyasn1.type.tagmap.TagMap` + New *TagMap* holding *TagSet* object gathered from childen types. 
+ """ + if unique not in self.__tagMap: + presentTypes = {} + skipTypes = {} + defaultType = None + for namedType in self.__namedTypes: + tagMap = namedType.asn1Object.tagMap + for tagSet in tagMap: + if unique and tagSet in presentTypes: + raise error.PyAsn1Error('Non-unique tagSet %s' % (tagSet,)) + presentTypes[tagSet] = namedType.asn1Object + skipTypes.update(tagMap.skipTypes) + + if defaultType is None: + defaultType = tagMap.defaultType + elif tagMap.defaultType is not None: + raise error.PyAsn1Error('Duplicate default ASN.1 type at %s' % (self,)) + + self.__tagMap[unique] = tagmap.TagMap(presentTypes, skipTypes, defaultType) + + return self.__tagMap[unique] + + @property + def hasOptionalOrDefault(self): + if self.__hasOptionalOrDefault is None: + self.__hasOptionalOrDefault = bool([True for namedType in self.__namedTypes if namedType.isDefaulted or namedType.isOptional]) + return self.__hasOptionalOrDefault + + @property + def namedTypes(self): + return iter(self.__namedTypes) + + @property + def requiredComponents(self): + if self.__requiredComponents is None: + self.__requiredComponents = frozenset( + [idx for idx, nt in enumerate(self.__namedTypes) if not nt.isOptional and not nt.isDefaulted] + ) + return self.__requiredComponents diff --git a/src/lib/pyasn1/type/namedval.py b/src/lib/pyasn1/type/namedval.py index d0fea7cc7..bcdbf153b 100644 --- a/src/lib/pyasn1/type/namedval.py +++ b/src/lib/pyasn1/type/namedval.py @@ -1,12 +1,21 @@ +# +# This file is part of pyasn1 software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# # ASN.1 named integers +# from pyasn1 import error -__all__ = [ 'NamedValues' ] +__all__ = ['NamedValues'] -class NamedValues: + +class NamedValues(object): def __init__(self, *namedValues): - self.nameToValIdx = {}; self.valToNameIdx = {} - self.namedValues = () + self.nameToValIdx = {} + self.valToNameIdx = {} + self.namedValues = () automaticVal = 1 for namedValue in namedValues: if isinstance(namedValue, tuple): @@ -21,9 +30,35 @@ def __init__(self, *namedValues): raise error.PyAsn1Error('Duplicate value %s=%s' % (name, val)) self.valToNameIdx[val] = name self.namedValues = self.namedValues + ((name, val),) - automaticVal = automaticVal + 1 - def __str__(self): return str(self.namedValues) - + automaticVal += 1 + + def __repr__(self): + return '%s(%s)' % (self.__class__.__name__, ', '.join([repr(x) for x in self.namedValues])) + + def __str__(self): + return str(self.namedValues) + + def __eq__(self, other): + return tuple(self) == tuple(other) + + def __ne__(self, other): + return tuple(self) != tuple(other) + + def __lt__(self, other): + return tuple(self) < tuple(other) + + def __le__(self, other): + return tuple(self) <= tuple(other) + + def __gt__(self, other): + return tuple(self) > tuple(other) + + def __ge__(self, other): + return tuple(self) >= tuple(other) + + def __hash__(self): + return hash(tuple(self)) + def getName(self, value): if value in self.valToNameIdx: return self.valToNameIdx[value] @@ -31,15 +66,28 @@ def getName(self, value): def getValue(self, name): if name in self.nameToValIdx: return self.nameToValIdx[name] - - def __getitem__(self, i): return self.namedValues[i] - def __len__(self): return len(self.namedValues) + + def getValues(self, *names): + try: + return [self.nameToValIdx[name] for name in names] + + except KeyError: + raise error.PyAsn1Error( + 'Unknown bit identifier(s): %s' % (set(names).difference(self.nameToValIdx),) + ) + + def 
__getitem__(self, i): + return self.namedValues[i] + + def __len__(self): + return len(self.namedValues) def __add__(self, namedValues): return self.__class__(*self.namedValues + namedValues) + def __radd__(self, namedValues): return self.__class__(*namedValues + tuple(self)) - + def clone(self, *namedValues): return self.__class__(*tuple(self) + namedValues) diff --git a/src/lib/pyasn1/type/tag.py b/src/lib/pyasn1/type/tag.py index 1144907fa..aaf185720 100644 --- a/src/lib/pyasn1/type/tag.py +++ b/src/lib/pyasn1/type/tag.py @@ -1,122 +1,342 @@ -# ASN.1 types tags -from operator import getitem +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# from pyasn1 import error +__all__ = ['tagClassUniversal', 'tagClassApplication', 'tagClassContext', + 'tagClassPrivate', 'tagFormatSimple', 'tagFormatConstructed', + 'tagCategoryImplicit', 'tagCategoryExplicit', 'tagCategoryUntagged', + 'Tag', 'TagSet'] + +#: Identifier for ASN.1 class UNIVERSAL tagClassUniversal = 0x00 + +#: Identifier for ASN.1 class APPLICATION tagClassApplication = 0x40 + +#: Identifier for ASN.1 class context-specific tagClassContext = 0x80 + +#: Identifier for ASN.1 class private tagClassPrivate = 0xC0 +#: Identifier for "simple" ASN.1 structure (e.g. scalar) tagFormatSimple = 0x00 + +#: Identifier for "constructed" ASN.1 structure (e.g. may have inner components) tagFormatConstructed = 0x20 tagCategoryImplicit = 0x01 tagCategoryExplicit = 0x02 tagCategoryUntagged = 0x04 -class Tag: + +class Tag(object): + """Create ASN.1 tag + + Represents ASN.1 tag that can be attached to a ASN.1 type to make + types distinguishable from each other. + + *Tag* objects are immutable and duck-type Python :class:`tuple` objects + holding three integer components of a tag. 
+ + Parameters + ---------- + tagClass: :py:class:`int` + Tag *class* value + + tagFormat: :py:class:`int` + Tag *format* value + + tagId: :py:class:`int` + Tag ID value + """ def __init__(self, tagClass, tagFormat, tagId): if tagId < 0: - raise error.PyAsn1Error( - 'Negative tag ID (%s) not allowed' % (tagId,) - ) - self.__tag = (tagClass, tagFormat, tagId) - self.uniq = (tagClass, tagId) - self.__hashedUniqTag = hash(self.uniq) + raise error.PyAsn1Error('Negative tag ID (%s) not allowed' % tagId) + self.__tagClass = tagClass + self.__tagFormat = tagFormat + self.__tagId = tagId + self.__tagClassId = tagClass, tagId + self.__lazyHash = None + + def __str__(self): + return '[%s:%s:%s]' % (self.__tagClass, self.__tagFormat, self.__tagId) def __repr__(self): return '%s(tagClass=%s, tagFormat=%s, tagId=%s)' % ( - (self.__class__.__name__,) + self.__tag - ) - # These is really a hotspot -- expose public "uniq" attribute to save on - # function calls - def __eq__(self, other): return self.uniq == other.uniq - def __ne__(self, other): return self.uniq != other.uniq - def __lt__(self, other): return self.uniq < other.uniq - def __le__(self, other): return self.uniq <= other.uniq - def __gt__(self, other): return self.uniq > other.uniq - def __ge__(self, other): return self.uniq >= other.uniq - def __hash__(self): return self.__hashedUniqTag - def __getitem__(self, idx): return self.__tag[idx] + (self.__class__.__name__, self.__tagClass, self.__tagFormat, self.__tagId) + ) + + def __eq__(self, other): + return self.__tagClassId == other + + def __ne__(self, other): + return self.__tagClassId != other + + def __lt__(self, other): + return self.__tagClassId < other + + def __le__(self, other): + return self.__tagClassId <= other + + def __gt__(self, other): + return self.__tagClassId > other + + def __ge__(self, other): + return self.__tagClassId >= other + + def __hash__(self): + if self.__lazyHash is None: + self.__lazyHash = hash(self.__tagClassId) + return 
self.__lazyHash + + def __getitem__(self, idx): + if idx == 0: + return self.__tagClass + elif idx == 1: + return self.__tagFormat + elif idx == 2: + return self.__tagId + else: + raise IndexError() + + def __iter__(self): + yield self.__tagClass + yield self.__tagFormat + yield self.__tagId + def __and__(self, otherTag): - (tagClass, tagFormat, tagId) = otherTag - return self.__class__( - self.__tag&tagClass, self.__tag&tagFormat, self.__tag&tagId - ) + return self.__class__(self.__tagClass & otherTag.tagClass, + self.__tagFormat & otherTag.tagFormat, + self.__tagId & otherTag.tagId) + def __or__(self, otherTag): - (tagClass, tagFormat, tagId) = otherTag - return self.__class__( - self.__tag[0]|tagClass, - self.__tag[1]|tagFormat, - self.__tag[2]|tagId - ) - def asTuple(self): return self.__tag # __getitem__() is slow - -class TagSet: + return self.__class__(self.__tagClass | otherTag.tagClass, + self.__tagFormat | otherTag.tagFormat, + self.__tagId | otherTag.tagId) + + @property + def tagClass(self): + """ASN.1 tag class + + Returns + ------- + : :py:class:`int` + Tag class + """ + return self.__tagClass + + @property + def tagFormat(self): + """ASN.1 tag format + + Returns + ------- + : :py:class:`int` + Tag format + """ + return self.__tagFormat + + @property + def tagId(self): + """ASN.1 tag ID + + Returns + ------- + : :py:class:`int` + Tag ID + """ + return self.__tagId + + +class TagSet(object): + """Create a collection of ASN.1 tags + + Represents a combination of :class:`~pyasn1.type.tag.Tag` objects + that can be attached to a ASN.1 type to make types distinguishable + from each other. + + *TagSet* objects are immutable and duck-type Python :class:`tuple` objects + holding arbitrary number of :class:`~pyasn1.type.tag.Tag` objects. + + Parameters + ---------- + baseTag: :class:`~pyasn1.type.tag.Tag` + Base *Tag* object. This tag survives IMPLICIT tagging. + + *superTags: :class:`~pyasn1.type.tag.Tag` + Additional *Tag* objects taking part in subtyping. 
+ """ def __init__(self, baseTag=(), *superTags): self.__baseTag = baseTag self.__superTags = superTags - self.__hashedSuperTags = hash(superTags) - _uniq = () - for t in superTags: - _uniq = _uniq + t.uniq - self.uniq = _uniq + self.__superTagsSignature = tuple( + [(superTag.tagClass, superTag.tagId) for superTag in superTags] + ) self.__lenOfSuperTags = len(superTags) - + self.__lazyHash = None + + def __str__(self): + return self.__superTags and '+'.join([str(x) for x in self.__superTags]) or '[untagged]' + def __repr__(self): return '%s(%s)' % ( - self.__class__.__name__, - ', '.join([repr(x) for x in self.__superTags]) - ) + self.__class__.__name__, '(), ' + ', '.join([repr(x) for x in self.__superTags]) + ) def __add__(self, superTag): - return self.__class__( - self.__baseTag, *self.__superTags + (superTag,) - ) + return self.__class__(self.__baseTag, *self.__superTags + (superTag,)) + def __radd__(self, superTag): - return self.__class__( - self.__baseTag, *(superTag,) + self.__superTags - ) + return self.__class__(self.__baseTag, *(superTag,) + self.__superTags) + + def __getitem__(self, i): + if i.__class__ is slice: + return self.__class__(self.__baseTag, *self.__superTags[i]) + else: + return self.__superTags[i] + + def __eq__(self, other): + return self.__superTagsSignature == other + + def __ne__(self, other): + return self.__superTagsSignature != other + + def __lt__(self, other): + return self.__superTagsSignature < other + + def __le__(self, other): + return self.__superTagsSignature <= other + + def __gt__(self, other): + return self.__superTagsSignature > other + + def __ge__(self, other): + return self.__superTagsSignature >= other + + def __hash__(self): + if self.__lazyHash is None: + self.__lazyHash = hash(self.__superTags) + return self.__lazyHash + + def __len__(self): + return self.__lenOfSuperTags + + # descriptor protocol + + def __get__(self, instance, owner): + if instance is None: + return self + + # This is a bit of hack: look up 
instance attribute first, + # then try class attribute if instance attribute with that + # name is not available. + # The rationale is to have `.tagSet` readable-writeable + # as a class attribute and read-only as instance attribute. + try: + return instance._tagSet + + except AttributeError: + return self + + def __set__(self, instance, value): + raise AttributeError('attribute is read-only') + + @property + def baseTag(self): + """Return base ASN.1 tag + + Returns + ------- + : :class:`~pyasn1.type.tag.Tag` + Base tag of this *TagSet* + """ + return self.__baseTag + + @property + def superTags(self): + """Return ASN.1 tags + + Returns + ------- + : :py:class:`tuple` + Tuple of :class:`~pyasn1.type.tag.Tag` objects that this *TagSet* contains + """ + return self.__superTags def tagExplicitly(self, superTag): - tagClass, tagFormat, tagId = superTag - if tagClass == tagClassUniversal: - raise error.PyAsn1Error( - 'Can\'t tag with UNIVERSAL-class tag' - ) - if tagFormat != tagFormatConstructed: - superTag = Tag(tagClass, tagFormatConstructed, tagId) + """Return explicitly tagged *TagSet* + + Create a new *TagSet* representing callee *TagSet* explicitly tagged + with passed tag(s). With explicit tagging mode, new tags are appended + to existing tag(s). + + Parameters + ---------- + superTag: :class:`~pyasn1.type.tag.Tag` + *Tag* object to tag this *TagSet* + + Returns + ------- + : :class:`~pyasn1.type.tag.TagSet` + New *TagSet* object + """ + if superTag.tagClass == tagClassUniversal: + raise error.PyAsn1Error('Can\'t tag with UNIVERSAL class tag') + if superTag.tagFormat != tagFormatConstructed: + superTag = Tag(superTag.tagClass, tagFormatConstructed, superTag.tagId) return self + superTag def tagImplicitly(self, superTag): - tagClass, tagFormat, tagId = superTag + """Return implicitly tagged *TagSet* + + Create a new *TagSet* representing callee *TagSet* implicitly tagged + with passed tag(s). With implicit tagging mode, new tag(s) replace the + last existing tag. 
+ + Parameters + ---------- + superTag: :class:`~pyasn1.type.tag.Tag` + *Tag* object to tag this *TagSet* + + Returns + ------- + : :class:`~pyasn1.type.tag.TagSet` + New *TagSet* object + """ if self.__superTags: - superTag = Tag(tagClass, self.__superTags[-1][1], tagId) + superTag = Tag(superTag.tagClass, self.__superTags[-1].tagFormat, superTag.tagId) return self[:-1] + superTag - def getBaseTag(self): return self.__baseTag - def __getitem__(self, idx): - if isinstance(idx, slice): - return self.__class__( - self.__baseTag, *getitem(self.__superTags, idx) - ) - return self.__superTags[idx] - def __eq__(self, other): return self.uniq == other.uniq - def __ne__(self, other): return self.uniq != other.uniq - def __lt__(self, other): return self.uniq < other.uniq - def __le__(self, other): return self.uniq <= other.uniq - def __gt__(self, other): return self.uniq > other.uniq - def __ge__(self, other): return self.uniq >= other.uniq - def __hash__(self): return self.__hashedSuperTags - def __len__(self): return self.__lenOfSuperTags def isSuperTagSetOf(self, tagSet): + """Test type relationship against given *TagSet* + + The callee is considered to be a supertype of given *TagSet* + tag-wise if all tags in *TagSet* are present in the callee and + they are in the same order. 
+ + Parameters + ---------- + tagSet: :class:`~pyasn1.type.tag.TagSet` + *TagSet* object to evaluate against the callee + + Returns + ------- + : :py:class:`bool` + `True` if callee is a supertype of *tagSet* + """ if len(tagSet) < self.__lenOfSuperTags: - return - idx = self.__lenOfSuperTags - 1 - while idx >= 0: - if self.__superTags[idx] != tagSet[idx]: - return - idx = idx - 1 - return 1 - -def initTagSet(tag): return TagSet(tag, tag) + return False + return self.__superTags == tagSet[:self.__lenOfSuperTags] + + # Backward compatibility + + def getBaseTag(self): + return self.__baseTag + +def initTagSet(tag): + return TagSet(tag, tag) diff --git a/src/lib/pyasn1/type/tagmap.py b/src/lib/pyasn1/type/tagmap.py index 7cec3a10e..8527f33dc 100644 --- a/src/lib/pyasn1/type/tagmap.py +++ b/src/lib/pyasn1/type/tagmap.py @@ -1,52 +1,102 @@ +# +# This file is part of pyasn1 software. +# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# from pyasn1 import error -class TagMap: - def __init__(self, posMap={}, negMap={}, defType=None): - self.__posMap = posMap.copy() - self.__negMap = negMap.copy() - self.__defType = defType - +__all__ = ['TagMap'] + + +class TagMap(object): + """Map *TagSet* objects to ASN.1 types + + Create an object mapping *TagSet* object to ASN.1 type. + + *TagMap* objects are immutable and duck-type read-only Python + :class:`dict` objects holding *TagSet* objects as keys and ASN.1 + type objects as values. + + Parameters + ---------- + presentTypes: :py:class:`dict` + Map of :class:`~pyasn1.type.tag.TagSet` to ASN.1 objects considered + as being unconditionally present in the *TagMap*. + + skipTypes: :py:class:`dict` + A collection of :class:`~pyasn1.type.tag.TagSet` objects considered + as absent in the *TagMap* even when *defaultType* is present. 
+ + defaultType: ASN.1 type object + An ASN.1 type object callee *TagMap* returns for any *TagSet* key not present + in *presentTypes* (unless given key is present in *skipTypes*). + """ + def __init__(self, presentTypes=None, skipTypes=None, defaultType=None): + self.__presentTypes = presentTypes or {} + self.__skipTypes = skipTypes or {} + self.__defaultType = defaultType + def __contains__(self, tagSet): - return tagSet in self.__posMap or \ - self.__defType is not None and tagSet not in self.__negMap + return (tagSet in self.__presentTypes or + self.__defaultType is not None and tagSet not in self.__skipTypes) def __getitem__(self, tagSet): - if tagSet in self.__posMap: - return self.__posMap[tagSet] - elif tagSet in self.__negMap: - raise error.PyAsn1Error('Key in negative map') - elif self.__defType is not None: - return self.__defType - else: - raise KeyError() + try: + return self.__presentTypes[tagSet] + except KeyError: + if self.__defaultType is None: + raise KeyError() + elif tagSet in self.__skipTypes: + raise error.PyAsn1Error('Key in negative map') + else: + return self.__defaultType + + def __iter__(self): + return iter(self.__presentTypes) def __repr__(self): - s = '%r/%r' % (self.__posMap, self.__negMap) - if self.__defType is not None: - s = s + '/%r' % (self.__defType,) + s = self.__class__.__name__ + '(' + if self.__presentTypes: + s += 'presentTypes=%r, ' % (self.__presentTypes,) + if self.__skipTypes: + s += 'skipTypes=%r, ' % (self.__skipTypes,) + if self.__defaultType is not None: + s += 'defaultType=%r' % (self.__defaultType,) + return s + ')' + + def __str__(self): + s = self.__class__.__name__ + ': ' + if self.__presentTypes: + s += 'presentTypes: %s, ' % ', '.join([x.prettyPrintType() for x in self.__presentTypes.values()]) + if self.__skipTypes: + s += 'skipTypes: %s, ' % ', '.join([x.prettyPrintType() for x in self.__skipTypes.values()]) + if self.__defaultType is not None: + s += 'defaultType: %s, ' % 
self.__defaultType.prettyPrintType() return s - def clone(self, parentType, tagMap, uniq=False): - if self.__defType is not None and tagMap.getDef() is not None: - raise error.PyAsn1Error('Duplicate default value at %s' % (self,)) - if tagMap.getDef() is not None: - defType = tagMap.getDef() - else: - defType = self.__defType - - posMap = self.__posMap.copy() - for k in tagMap.getPosMap(): - if uniq and k in posMap: - raise error.PyAsn1Error('Duplicate positive key %s' % (k,)) - posMap[k] = parentType - - negMap = self.__negMap.copy() - negMap.update(tagMap.getNegMap()) - - return self.__class__( - posMap, negMap, defType, - ) - - def getPosMap(self): return self.__posMap.copy() - def getNegMap(self): return self.__negMap.copy() - def getDef(self): return self.__defType + @property + def presentTypes(self): + """Return *TagSet* to ASN.1 type map present in callee *TagMap*""" + return self.__presentTypes + + @property + def skipTypes(self): + """Return *TagSet* collection unconditionally absent in callee *TagMap*""" + return self.__skipTypes + + @property + def defaultType(self): + """Return default ASN.1 type being returned for any missing *TagSet*""" + return self.__defaultType + + # Backward compatibility + + def getPosMap(self): + return self.presentTypes + + def getNegMap(self): + return self.skipTypes + + def getDef(self): + return self.defaultType diff --git a/src/lib/pyasn1/type/univ.py b/src/lib/pyasn1/type/univ.py index 9cd16f8a2..1a146e030 100644 --- a/src/lib/pyasn1/type/univ.py +++ b/src/lib/pyasn1/type/univ.py @@ -1,18 +1,70 @@ -# ASN.1 "universal" data types -import operator, sys +# +# This file is part of pyasn1 software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +import sys +import math from pyasn1.type import base, tag, constraint, namedtype, namedval, tagmap from pyasn1.codec.ber import eoo -from pyasn1.compat import octets +from pyasn1.compat import octets, integer, binary from pyasn1 import error +NoValue = base.NoValue +noValue = NoValue() + +__all__ = ['Integer', 'Boolean', 'BitString', 'OctetString', 'Null', + 'ObjectIdentifier', 'Real', 'Enumerated', 'SequenceOfAndSetOfBase', 'SequenceOf', + 'SetOf', 'SequenceAndSetBase', 'Sequence', 'Set', 'Choice', 'Any', + 'NoValue', 'noValue'] + # "Simple" ASN.1 types (yet incomplete) class Integer(base.AbstractSimpleAsn1Item): - tagSet = baseTagSet = tag.initTagSet( + """Create |ASN.1| type or object. + + |ASN.1| objects are immutable and duck-type Python :class:`int` objects. + + Parameters + ---------- + value : :class:`int`, :class:`str` or |ASN.1| object + Python integer or string literal or |ASN.1| class instance. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + namedValues: :py:class:`~pyasn1.type.namedval.NamedValues` + Object representing non-default symbolic aliases for numbers + + Raises + ------ + : :py:class:`pyasn1.error.PyAsn1Error` + On constraint violation or bad initializer. + """ + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x02) - ) + ) + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. 
+ subtypeSpec = constraint.ConstraintsIntersection() + + #: Default :py:class:`~pyasn1.type.namedval.NamedValues` object + #: representing symbolic aliases for numbers namedValues = namedval.NamedValues() - def __init__(self, value=None, tagSet=None, subtypeSpec=None, + + # Optimization for faster codec lookup + typeId = base.AbstractSimpleAsn1Item.getTypeId() + + def __init__(self, value=noValue, tagSet=None, subtypeSpec=None, namedValues=None): if namedValues is None: self.__namedValues = self.namedValues @@ -20,490 +72,1386 @@ def __init__(self, value=None, tagSet=None, subtypeSpec=None, self.__namedValues = namedValues base.AbstractSimpleAsn1Item.__init__( self, value, tagSet, subtypeSpec - ) + ) + + def __repr__(self): + if self.__namedValues is not self.namedValues: + return '%s, %r)' % (base.AbstractSimpleAsn1Item.__repr__(self)[:-1], self.__namedValues) + else: + return base.AbstractSimpleAsn1Item.__repr__(self) + + def __and__(self, value): + return self.clone(self._value & value) + + def __rand__(self, value): + return self.clone(value & self._value) + + def __or__(self, value): + return self.clone(self._value | value) + + def __ror__(self, value): + return self.clone(value | self._value) + + def __xor__(self, value): + return self.clone(self._value ^ value) + + def __rxor__(self, value): + return self.clone(value ^ self._value) + + def __lshift__(self, value): + return self.clone(self._value << value) + + def __rshift__(self, value): + return self.clone(self._value >> value) + + def __add__(self, value): + return self.clone(self._value + value) + + def __radd__(self, value): + return self.clone(value + self._value) + + def __sub__(self, value): + return self.clone(self._value - value) + + def __rsub__(self, value): + return self.clone(value - self._value) + + def __mul__(self, value): + return self.clone(self._value * value) - def __and__(self, value): return self.clone(self._value & value) - def __rand__(self, value): return self.clone(value & 
self._value) - def __or__(self, value): return self.clone(self._value | value) - def __ror__(self, value): return self.clone(value | self._value) - def __xor__(self, value): return self.clone(self._value ^ value) - def __rxor__(self, value): return self.clone(value ^ self._value) - def __lshift__(self, value): return self.clone(self._value << value) - def __rshift__(self, value): return self.clone(self._value >> value) - - def __add__(self, value): return self.clone(self._value + value) - def __radd__(self, value): return self.clone(value + self._value) - def __sub__(self, value): return self.clone(self._value - value) - def __rsub__(self, value): return self.clone(value - self._value) - def __mul__(self, value): return self.clone(self._value * value) - def __rmul__(self, value): return self.clone(value * self._value) - def __mod__(self, value): return self.clone(self._value % value) - def __rmod__(self, value): return self.clone(value % self._value) - def __pow__(self, value, modulo=None): return self.clone(pow(self._value, value, modulo)) - def __rpow__(self, value): return self.clone(pow(value, self._value)) + def __rmul__(self, value): + return self.clone(value * self._value) + + def __mod__(self, value): + return self.clone(self._value % value) + + def __rmod__(self, value): + return self.clone(value % self._value) + + def __pow__(self, value, modulo=None): + return self.clone(pow(self._value, value, modulo)) + + def __rpow__(self, value): + return self.clone(pow(value, self._value)) + + def __floordiv__(self, value): + return self.clone(self._value // value) + + def __rfloordiv__(self, value): + return self.clone(value // self._value) if sys.version_info[0] <= 2: - def __div__(self, value): return self.clone(self._value // value) - def __rdiv__(self, value): return self.clone(value // self._value) + def __div__(self, value): + if isinstance(value, float): + return Real(self._value / value) + else: + return self.clone(self._value / value) + + def 
__rdiv__(self, value): + if isinstance(value, float): + return Real(value / self._value) + else: + return self.clone(value / self._value) else: - def __truediv__(self, value): return self.clone(self._value / value) - def __rtruediv__(self, value): return self.clone(value / self._value) - def __divmod__(self, value): return self.clone(self._value // value) - def __rdivmod__(self, value): return self.clone(value // self._value) + def __truediv__(self, value): + return Real(self._value / value) + + def __rtruediv__(self, value): + return Real(value / self._value) + + def __divmod__(self, value): + return self.clone(divmod(self._value, value)) + + def __rdivmod__(self, value): + return self.clone(divmod(value, self._value)) __hash__ = base.AbstractSimpleAsn1Item.__hash__ - def __int__(self): return int(self._value) + def __int__(self): + return int(self._value) + if sys.version_info[0] <= 2: def __long__(self): return long(self._value) - def __float__(self): return float(self._value) - def __abs__(self): return abs(self._value) - def __index__(self): return int(self._value) - def __lt__(self, value): return self._value < value - def __le__(self, value): return self._value <= value - def __eq__(self, value): return self._value == value - def __ne__(self, value): return self._value != value - def __gt__(self, value): return self._value > value - def __ge__(self, value): return self._value >= value + def __float__(self): + return float(self._value) - def prettyIn(self, value): - if not isinstance(value, str): - try: - return int(value) - except: - raise error.PyAsn1Error( - 'Can\'t coerce %s into integer: %s' % (value, sys.exc_info()[1]) - ) - r = self.__namedValues.getValue(value) - if r is not None: + def __abs__(self): + return self.clone(abs(self._value)) + + def __index__(self): + return int(self._value) + + def __pos__(self): + return self.clone(+self._value) + + def __neg__(self): + return self.clone(-self._value) + + def __invert__(self): + return 
self.clone(~self._value) + + def __round__(self, n=0): + r = round(self._value, n) + if n: + return self.clone(r) + else: return r + + def __floor__(self): + return math.floor(self._value) + + def __ceil__(self): + return math.ceil(self._value) + + if sys.version_info[0:2] > (2, 5): + def __trunc__(self): + return self.clone(math.trunc(self._value)) + + def __lt__(self, value): + return self._value < value + + def __le__(self, value): + return self._value <= value + + def __eq__(self, value): + return self._value == value + + def __ne__(self, value): + return self._value != value + + def __gt__(self, value): + return self._value > value + + def __ge__(self, value): + return self._value >= value + + def prettyIn(self, value): try: return int(value) - except: + + except ValueError: + valueOfName = self.__namedValues.getValue(value) + if valueOfName is not None: + return valueOfName + raise error.PyAsn1Error( - 'Can\'t coerce %s into integer: %s' % (value, sys.exc_info()[1]) - ) + 'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1]) + ) def prettyOut(self, value): - r = self.__namedValues.getName(value) - return r is None and str(value) or repr(r) + nameOfValue = self.__namedValues.getName(value) + return nameOfValue is None and str(value) or repr(nameOfValue) - def getNamedValues(self): return self.__namedValues + def getNamedValues(self): + return self.__namedValues - def clone(self, value=None, tagSet=None, subtypeSpec=None, - namedValues=None): - if value is None and tagSet is None and subtypeSpec is None \ - and namedValues is None: - return self - if value is None: + def clone(self, value=noValue, tagSet=None, subtypeSpec=None, namedValues=None): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *clone()* method will replace corresponding + properties of the |ASN.1| object. 
+ + Parameters + ---------- + value: :class:`int`, :class:`str` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing ASN.1 tag(s) to use in new object instead of inheriting from the caller + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing ASN.1 subtype constraint(s) to use in new object instead of inheriting from the caller + + namedValues: :py:class:`~pyasn1.type.namedval.NamedValues` + Object representing symbolic aliases for numbers to use instead of inheriting from caller + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + isModified = False + + if value is None or value is noValue: value = self._value - if tagSet is None: + else: + isModified = True + if tagSet is None or tagSet is noValue: tagSet = self._tagSet - if subtypeSpec is None: + else: + isModified = True + if subtypeSpec is None or subtypeSpec is noValue: subtypeSpec = self._subtypeSpec - if namedValues is None: + else: + isModified = True + if namedValues is None or namedValues is noValue: namedValues = self.__namedValues - return self.__class__(value, tagSet, subtypeSpec, namedValues) + else: + isModified = True + + if isModified: + return self.__class__(value, tagSet, subtypeSpec, namedValues) + else: + return self - def subtype(self, value=None, implicitTag=None, explicitTag=None, + def subtype(self, value=noValue, implicitTag=None, explicitTag=None, subtypeSpec=None, namedValues=None): - if value is None: + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *subtype()* method will be added to the corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`int`, :class:`str` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. 
+ + implicitTag: :py:class:`~pyasn1.type.tag.Tag` + Implicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + explicitTag: :py:class:`~pyasn1.type.tag.Tag` + Explicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Add ASN.1 constraints object to one of the caller, then + use the result as new object's ASN.1 constraints. + + namedValues: :py:class:`~pyasn1.type.namedval.NamedValues` + Add given object representing symbolic aliases for numbers + to one of the caller, then use the result as new object's + named numbers. + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + isModified = False + + if value is None or value is noValue: value = self._value - if implicitTag is not None: + else: + isModified = True + if implicitTag is not None and implicitTag is not noValue: tagSet = self._tagSet.tagImplicitly(implicitTag) - elif explicitTag is not None: + isModified = True + elif explicitTag is not None and explicitTag is not noValue: tagSet = self._tagSet.tagExplicitly(explicitTag) + isModified = True else: tagSet = self._tagSet - if subtypeSpec is None: + if subtypeSpec is None or subtypeSpec is noValue: subtypeSpec = self._subtypeSpec else: - subtypeSpec = subtypeSpec + self._subtypeSpec - if namedValues is None: + subtypeSpec = self._subtypeSpec + subtypeSpec + isModified = True + if namedValues is None or namedValues is noValue: namedValues = self.__namedValues else: namedValues = namedValues + self.__namedValues - return self.__class__(value, tagSet, subtypeSpec, namedValues) + isModified = True + + if isModified: + return self.__class__(value, tagSet, subtypeSpec, namedValues) + else: + return self + class Boolean(Integer): - tagSet = baseTagSet = tag.initTagSet( + __doc__ = Integer.__doc__ + + #: 
Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x01), - ) - subtypeSpec = Integer.subtypeSpec+constraint.SingleValueConstraint(0,1) + ) + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = Integer.subtypeSpec + constraint.SingleValueConstraint(0, 1) + + #: Default :py:class:`~pyasn1.type.namedval.NamedValues` object + #: representing symbolic aliases for numbers namedValues = Integer.namedValues.clone(('False', 0), ('True', 1)) + # Optimization for faster codec lookup + typeId = Integer.getTypeId() + + class BitString(base.AbstractSimpleAsn1Item): - tagSet = baseTagSet = tag.initTagSet( + """Create |ASN.1| type or object. + + |ASN.1| objects are immutable and duck-type both Python :class:`tuple` (as a tuple + of bits) and :class:`int` objects. + + Parameters + ---------- + value : :class:`int`, :class:`str` or |ASN.1| object + Python integer or string literal representing binary or hexadecimal + number or sequence of integer bits or |ASN.1| object. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + namedValues: :py:class:`~pyasn1.type.namedval.NamedValues` + Object representing non-default symbolic aliases for numbers + + binValue: :py:class:`str` + Binary string initializer to use instead of the *value*. + Example: '10110011'. + + hexValue: :py:class:`str` + Hexadecimal string initializer to use instead of the *value*. + Example: 'DEADBEEF'. 
+ + Raises + ------ + : :py:class:`pyasn1.error.PyAsn1Error` + On constraint violation or bad initializer. + """ + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x03) - ) + ) + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + #: Default :py:class:`~pyasn1.type.namedval.NamedValues` object + #: representing symbolic aliases for numbers namedValues = namedval.NamedValues() - def __init__(self, value=None, tagSet=None, subtypeSpec=None, - namedValues=None): + + # Optimization for faster codec lookup + typeId = base.AbstractSimpleAsn1Item.getTypeId() + + defaultBinValue = defaultHexValue = noValue + + if sys.version_info[0] < 3: + SizedIntegerBase = long + else: + SizedIntegerBase = int + + class SizedInteger(SizedIntegerBase): + bitLength = leadingZeroBits = None + + def setBitLength(self, bitLength): + self.bitLength = bitLength + self.leadingZeroBits = max(bitLength - integer.bitLength(self), 0) + return self + + def __len__(self): + if self.bitLength is None: + self.setBitLength(integer.bitLength(self)) + + return self.bitLength + + def __init__(self, value=noValue, tagSet=None, subtypeSpec=None, + namedValues=None, binValue=noValue, hexValue=noValue): if namedValues is None: self.__namedValues = self.namedValues else: self.__namedValues = namedValues - base.AbstractSimpleAsn1Item.__init__( - self, value, tagSet, subtypeSpec - ) + if binValue is not noValue: + value = self.fromBinaryString(binValue) + elif hexValue is not noValue: + value = self.fromHexString(hexValue) + elif value is None or value is noValue: + if self.defaultBinValue is not 
noValue: + value = self.fromBinaryString(self.defaultBinValue) + elif self.defaultHexValue is not noValue: + value = self.fromHexString(self.defaultHexValue) + base.AbstractSimpleAsn1Item.__init__(self, value, tagSet, subtypeSpec) - def clone(self, value=None, tagSet=None, subtypeSpec=None, - namedValues=None): - if value is None and tagSet is None and subtypeSpec is None \ - and namedValues is None: - return self - if value is None: + def clone(self, value=noValue, tagSet=None, subtypeSpec=None, + namedValues=None, binValue=noValue, hexValue=noValue): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *clone()* method will replace corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value : :class:`int`, :class:`str` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing ASN.1 tag(s) to use in new object instead of inheriting from the caller + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing ASN.1 subtype constraint(s) to use in new object instead of inheriting from the caller + + namedValues: :py:class:`~pyasn1.type.namedval.NamedValues` + Class instance representing BitString type enumerations + + binValue: :py:class:`str` + Binary string initializer to use instead of the *value*. + Example: '10110011'. + + hexValue: :py:class:`str` + Hexadecimal string initializer to use instead of the *value*. + Example: 'DEADBEEF'. 
+ + Returns + ------- + : + new instance of |ASN.1| type/value + """ + isModified = False + + if (value is None or value is noValue) and binValue is noValue and hexValue is noValue: value = self._value - if tagSet is None: + else: + isModified = True + if tagSet is None or tagSet is noValue: tagSet = self._tagSet - if subtypeSpec is None: + else: + isModified = True + if subtypeSpec is None or subtypeSpec is noValue: subtypeSpec = self._subtypeSpec - if namedValues is None: + else: + isModified = True + if namedValues is None or namedValues is noValue: namedValues = self.__namedValues - return self.__class__(value, tagSet, subtypeSpec, namedValues) + else: + isModified = True - def subtype(self, value=None, implicitTag=None, explicitTag=None, - subtypeSpec=None, namedValues=None): - if value is None: + if isModified: + return self.__class__(value, tagSet, subtypeSpec, namedValues, binValue, hexValue) + else: + return self + + def subtype(self, value=noValue, implicitTag=None, explicitTag=None, + subtypeSpec=None, namedValues=None, binValue=noValue, hexValue=noValue): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *subtype()* method will be added to the corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value : :class:`int`, :class:`str` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + implicitTag: :py:class:`~pyasn1.type.tag.Tag` + Implicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + explicitTag: :py:class:`~pyasn1.type.tag.Tag` + Explicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). 
+ + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Add ASN.1 constraints object to one of the caller, then + use the result as new object's ASN.1 constraints. + + namedValues: :py:class:`~pyasn1.type.namedval.NamedValues` + Add given object representing symbolic aliases for numbers + to one of the caller, then use the result as new object's + named numbers. + + binValue: :py:class:`str` + Binary string initializer to use instead of the *value*. + Example: '10110011'. + + hexValue: :py:class:`str` + Hexadecimal string initializer to use instead of the *value*. + Example: 'DEADBEEF'. + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + isModified = False + + if (value is None or value is noValue) and binValue is noValue and hexValue is noValue: value = self._value - if implicitTag is not None: + else: + isModified = True + if implicitTag is not None and implicitTag is not noValue: tagSet = self._tagSet.tagImplicitly(implicitTag) - elif explicitTag is not None: + isModified = True + elif explicitTag is not None and explicitTag is not noValue: tagSet = self._tagSet.tagExplicitly(explicitTag) + isModified = True else: tagSet = self._tagSet - if subtypeSpec is None: + if subtypeSpec is None or subtypeSpec is noValue: subtypeSpec = self._subtypeSpec else: - subtypeSpec = subtypeSpec + self._subtypeSpec - if namedValues is None: + subtypeSpec = self._subtypeSpec + subtypeSpec + isModified = True + if namedValues is None or namedValues is noValue: namedValues = self.__namedValues else: namedValues = namedValues + self.__namedValues - return self.__class__(value, tagSet, subtypeSpec, namedValues) + isModified = True - def __str__(self): return str(tuple(self)) + if isModified: + return self.__class__(value, tagSet, subtypeSpec, namedValues, binValue, hexValue) + else: + return self + + def __str__(self): + return self.asBinary() + + def __eq__(self, other): + other = self.prettyIn(other) + return self is other or self._value == 
other and len(self._value) == len(other) + + def __ne__(self, other): + other = self.prettyIn(other) + return self._value != other or len(self._value) != len(other) + + def __lt__(self, other): + other = self.prettyIn(other) + return len(self._value) < len(other) or len(self._value) == len(other) and self._value < other + + def __le__(self, other): + other = self.prettyIn(other) + return len(self._value) <= len(other) or len(self._value) == len(other) and self._value <= other + + def __gt__(self, other): + other = self.prettyIn(other) + return len(self._value) > len(other) or len(self._value) == len(other) and self._value > other + + def __ge__(self, other): + other = self.prettyIn(other) + return len(self._value) >= len(other) or len(self._value) == len(other) and self._value >= other # Immutable sequence object protocol def __len__(self): - if self._len is None: - self._len = len(self._value) - return self._len + return len(self._value) + def __getitem__(self, i): - if isinstance(i, slice): - return self.clone(operator.getitem(self._value, i)) + if i.__class__ is slice: + return self.clone([self[x] for x in range(*i.indices(len(self)))]) else: - return self._value[i] + length = len(self._value) - 1 + if i > length or i < 0: + raise IndexError('bit index out of range') + return (self._value >> (length - i)) & 1 + + def __iter__(self): + length = len(self._value) + while length: + length -= 1 + yield (self._value >> length) & 1 + + def __reversed__(self): + return reversed(tuple(self)) + + # arithmetic operators + + def __add__(self, value): + value = self.prettyIn(value) + return self.clone(self.SizedInteger(self._value << len(value) | value).setBitLength(len(self._value) + len(value))) + + def __radd__(self, value): + value = self.prettyIn(value) + return self.clone(self.SizedInteger(value << len(self._value) | self._value).setBitLength(len(self._value) + len(value))) + + def __mul__(self, value): + bitString = self._value + while value > 1: + bitString <<= 
len(self._value) + bitString |= self._value + value -= 1 + return self.clone(bitString) + + def __rmul__(self, value): + return self * value + + def __lshift__(self, count): + return self.clone(self.SizedInteger(self._value << count).setBitLength(len(self._value) + count)) + + def __rshift__(self, count): + return self.clone(self.SizedInteger(self._value >> count).setBitLength(max(0, len(self._value) - count))) + + def __int__(self): + return self._value + + def __float__(self): + return float(self._value) + + if sys.version_info[0] < 3: + def __long__(self): + return self._value + + def asNumbers(self): + """Get |ASN.1| value as a sequence of 8-bit integers. + + If |ASN.1| object length is not a multiple of 8, result + will be left-padded with zeros. + """ + return tuple(octets.octs2ints(self.asOctets())) + + def asOctets(self): + """Get |ASN.1| value as a sequence of octets. + + If |ASN.1| object length is not a multiple of 8, result + will be left-padded with zeros. + """ + return integer.to_bytes(self._value, length=len(self)) + + def asInteger(self): + """Get |ASN.1| value as a single integer value. + """ + return self._value + + def asBinary(self): + """Get |ASN.1| value as a text string of bits. 
+ """ + binString = binary.bin(self._value)[2:] + return '0'*(len(self._value) - len(binString)) + binString + + @classmethod + def fromHexString(cls, value): + try: + return cls.SizedInteger(value, 16).setBitLength(len(value) * 4) + + except ValueError: + raise error.PyAsn1Error('%s.fromHexString() error: %s' % (cls.__name__, sys.exc_info()[1])) + + @classmethod + def fromBinaryString(cls, value): + try: + return cls.SizedInteger(value or '0', 2).setBitLength(len(value)) - def __add__(self, value): return self.clone(self._value + value) - def __radd__(self, value): return self.clone(value + self._value) - def __mul__(self, value): return self.clone(self._value * value) - def __rmul__(self, value): return self * value + except ValueError: + raise error.PyAsn1Error('%s.fromBinaryString() error: %s' % (cls.__name__, sys.exc_info()[1])) + + @classmethod + def fromOctetString(cls, value, padding=0): + return cls(cls.SizedInteger(integer.from_bytes(value) >> padding).setBitLength(len(value) * 8 - padding)) def prettyIn(self, value): - r = [] - if not value: - return () - elif isinstance(value, str): - if value[0] == '\'': + if octets.isStringType(value): + if not value: + return self.SizedInteger(0).setBitLength(0) + + elif value[0] == '\'': # "'1011'B" -- ASN.1 schema representation (deprecated) if value[-2:] == '\'B': - for v in value[1:-2]: - if v == '0': - r.append(0) - elif v == '1': - r.append(1) - else: - raise error.PyAsn1Error( - 'Non-binary BIT STRING initializer %s' % (v,) - ) - return tuple(r) + return self.fromBinaryString(value[1:-2]) elif value[-2:] == '\'H': - for v in value[1:-2]: - i = 4 - v = int(v, 16) - while i: - i = i - 1 - r.append((v>>i)&0x01) - return tuple(r) + return self.fromHexString(value[1:-2]) else: raise error.PyAsn1Error( 'Bad BIT STRING value notation %s' % (value,) - ) - else: - for i in value.split(','): - j = self.__namedValues.getValue(i) - if j is None: - raise error.PyAsn1Error( - 'Unknown bit identifier \'%s\'' % (i,) - ) - if 
j >= len(r): - r.extend([0]*(j-len(r)+1)) - r[j] = 1 - return tuple(r) + ) + + elif self.__namedValues and not value.isdigit(): # named bits like 'Urgent, Active' + bitPositions = self.__namedValues.getValues(*[x.strip() for x in value.split(',')]) + + rightmostPosition = max(bitPositions) + + number = 0 + for bitPosition in bitPositions: + number |= 1 << (rightmostPosition - bitPosition) + + return self.SizedInteger(number).setBitLength(rightmostPosition + 1) + + elif value.startswith('0x'): + return self.fromHexString(value[2:]) + + elif value.startswith('0b'): + return self.fromBinaryString(value[2:]) + + else: # assume plain binary string like '1011' + return self.fromBinaryString(value) + elif isinstance(value, (tuple, list)): - r = tuple(value) - for b in r: - if b and b != 1: - raise error.PyAsn1Error( - 'Non-binary BitString initializer \'%s\'' % (r,) - ) - return r - elif isinstance(value, BitString): - return tuple(value) + return self.fromBinaryString(''.join([b and '1' or '0' for b in value])) + + elif isinstance(value, (self.SizedInteger, BitString)): + return self.SizedInteger(value).setBitLength(len(value)) + + elif isinstance(value, intTypes): + return self.SizedInteger(value) + else: raise error.PyAsn1Error( 'Bad BitString initializer type \'%s\'' % (value,) - ) + ) def prettyOut(self, value): - return '\"\'%s\'B\"' % ''.join([str(x) for x in value]) + return '\'%s\'' % str(self) + + +try: + # noinspection PyStatementEffect + all + +except NameError: # Python 2.4 + # noinspection PyShadowingBuiltins + def all(iterable): + for element in iterable: + if not element: + return False + return True + class OctetString(base.AbstractSimpleAsn1Item): - tagSet = baseTagSet = tag.initTagSet( + """Create |ASN.1| type or object. + + |ASN.1| objects are immutable and duck-type Python 2 :class:`str` or Python 3 :class:`bytes`. + When used in Unicode context, |ASN.1| type assumes "|encoding|" serialization. 
+ + Parameters + ---------- + value : :class:`str`, :class:`bytes` or |ASN.1| object + string (Python 2) or bytes (Python 3), alternatively unicode object + (Python 2) or string (Python 3) representing character string to be + serialized into octets (note `encoding` parameter) or |ASN.1| object. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + encoding: :py:class:`str` + Unicode codec ID to encode/decode :class:`unicode` (Python 2) or + :class:`str` (Python 3) the payload when |ASN.1| object is used + in text string context. + + binValue: :py:class:`str` + Binary string initializer to use instead of the *value*. + Example: '10110011'. + + hexValue: :py:class:`str` + Hexadecimal string initializer to use instead of the *value*. + Example: 'DEADBEEF'. + + Raises + ------ + : :py:class:`pyasn1.error.PyAsn1Error` + On constraint violation or bad initializer. + """ + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x04) - ) - defaultBinValue = defaultHexValue = base.noValue - encoding = 'us-ascii' - def __init__(self, value=None, tagSet=None, subtypeSpec=None, - encoding=None, binValue=None, hexValue=None): + ) + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. 
+ subtypeSpec = constraint.ConstraintsIntersection() + + # Optimization for faster codec lookup + typeId = base.AbstractSimpleAsn1Item.getTypeId() + + defaultBinValue = defaultHexValue = noValue + encoding = 'iso-8859-1' + + def __init__(self, value=noValue, tagSet=None, subtypeSpec=None, + encoding=None, binValue=noValue, hexValue=noValue): if encoding is None: self._encoding = self.encoding else: self._encoding = encoding - if binValue is not None: + if binValue is not noValue: value = self.fromBinaryString(binValue) - if hexValue is not None: + elif hexValue is not noValue: value = self.fromHexString(hexValue) - if value is None or value is base.noValue: - value = self.defaultHexValue - if value is None or value is base.noValue: - value = self.defaultBinValue - self.__intValue = None + elif value is None or value is noValue: + if self.defaultBinValue is not noValue: + value = self.fromBinaryString(self.defaultBinValue) + elif self.defaultHexValue is not noValue: + value = self.fromHexString(self.defaultHexValue) + self.__asNumbersCache = None base.AbstractSimpleAsn1Item.__init__(self, value, tagSet, subtypeSpec) - def clone(self, value=None, tagSet=None, subtypeSpec=None, - encoding=None, binValue=None, hexValue=None): - if value is None and tagSet is None and subtypeSpec is None and \ - encoding is None and binValue is None and hexValue is None: + def clone(self, value=noValue, tagSet=None, subtypeSpec=None, + encoding=None, binValue=noValue, hexValue=noValue): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *clone()* method will replace corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value : :class:`str`, :class:`bytes` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. 
+ + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing ASN.1 tag(s) to use in new object instead of inheriting from the caller + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing ASN.1 subtype constraint(s) to use in new object instead of inheriting from the caller + + encoding: :py:class:`str` + Unicode codec ID to encode/decode :class:`unicode` (Python 2) + or :class:`str` (Python 3) the payload when |ASN.1| + object is used in string context. + + binValue: :py:class:`str` + Binary string initializer. Example: '10110011'. + + hexValue: :py:class:`str` + Hexadecimal string initializer. Example: 'DEADBEEF'. + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + isModified = False + + if (value is None or value is noValue) and binValue is noValue and hexValue is noValue: + value = self._value + else: + isModified = True + if tagSet is None or tagSet is noValue: + tagSet = self._tagSet + else: + isModified = True + if subtypeSpec is None or subtypeSpec is noValue: + subtypeSpec = self._subtypeSpec + else: + isModified = True + if encoding is None or encoding is noValue: + encoding = self._encoding + else: + isModified = True + + if isModified: + return self.__class__(value, tagSet, subtypeSpec, encoding, binValue, hexValue) + else: return self - if value is None and binValue is None and hexValue is None: + + def subtype(self, value=noValue, implicitTag=None, explicitTag=None, + subtypeSpec=None, encoding=None, binValue=noValue, + hexValue=noValue): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *subtype()* method will be added to the corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value : :class:`str`, :class:`bytes` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. 
+ + implicitTag: :py:class:`~pyasn1.type.tag.Tag` + Implicitly apply given ASN.1 tag object to |ASN.1| object tag set + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + explicitTag: :py:class:`~pyasn1.type.tag.Tag` + Explicitly apply given ASN.1 tag object to |ASN.1| object tag set + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Add ASN.1 constraints object to one of the caller, then + use the result as new object's ASN.1 constraints. + + encoding: :py:class:`str` + Unicode codec ID to encode/decode :class:`unicode` (Python 2) + or :class:`str` (Python 3) the payload when *OctetString* + object is used in string context. + + binValue: :py:class:`str` + Binary string initializer. Example: '10110011'. + + hexValue: :py:class:`str` + Hexadecimal string initializer. Example: 'DEADBEEF'. + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + isModified = False + + if (value is None or value is noValue) and binValue is noValue and hexValue is noValue: value = self._value - if tagSet is None: + else: + isModified = True + if implicitTag is not None and implicitTag is not noValue: + tagSet = self._tagSet.tagImplicitly(implicitTag) + isModified = True + elif explicitTag is not None and explicitTag is not noValue: + tagSet = self._tagSet.tagExplicitly(explicitTag) + isModified = True + else: tagSet = self._tagSet - if subtypeSpec is None: + if subtypeSpec is None or subtypeSpec is noValue: subtypeSpec = self._subtypeSpec - if encoding is None: + else: + subtypeSpec = self._subtypeSpec + subtypeSpec + isModified = True + if encoding is None or encoding is noValue: encoding = self._encoding - return self.__class__( - value, tagSet, subtypeSpec, encoding, binValue, hexValue - ) - + else: + isModified = True + + if isModified: + return self.__class__(value, tagSet, subtypeSpec, encoding, binValue, 
hexValue) + else: + return self + if sys.version_info[0] <= 2: def prettyIn(self, value): if isinstance(value, str): return value + elif isinstance(value, unicode): + try: + return value.encode(self._encoding) + except (LookupError, UnicodeEncodeError): + raise error.PyAsn1Error( + 'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding) + ) elif isinstance(value, (tuple, list)): try: - return ''.join([ chr(x) for x in value ]) + return ''.join([chr(x) for x in value]) except ValueError: raise error.PyAsn1Error( - 'Bad OctetString initializer \'%s\'' % (value,) - ) + 'Bad %s initializer \'%s\'' % (self.__class__.__name__, value) + ) else: return str(value) + + def __str__(self): + return str(self._value) + + def __unicode__(self): + try: + return self._value.decode(self._encoding) + + except UnicodeDecodeError: + raise error.PyAsn1Error( + 'Can\'t decode string \'%s\' with \'%s\' codec' % (self._value, self._encoding) + ) + + def asOctets(self): + return str(self._value) + + def asNumbers(self): + if self.__asNumbersCache is None: + self.__asNumbersCache = tuple([ord(x) for x in self._value]) + return self.__asNumbersCache + else: def prettyIn(self, value): if isinstance(value, bytes): return value - elif isinstance(value, OctetString): - return value.asOctets() - elif isinstance(value, (tuple, list, map)): + elif isinstance(value, str): try: - return bytes(value) - except ValueError: - raise error.PyAsn1Error( - 'Bad OctetString initializer \'%s\'' % (value,) - ) - else: - try: - return str(value).encode(self._encoding) + return value.encode(self._encoding) except UnicodeEncodeError: raise error.PyAsn1Error( 'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding) - ) - + ) + elif isinstance(value, OctetString): # a shortcut, bytes() would work the same way + return value.asOctets() + elif isinstance(value, base.AbstractSimpleAsn1Item): # this mostly targets Integer objects + return self.prettyIn(str(value)) + elif 
isinstance(value, (tuple, list)): + return self.prettyIn(bytes(value)) + else: + return bytes(value) + + def __str__(self): + try: + return self._value.decode(self._encoding) + + except UnicodeDecodeError: + raise error.PyAsn1Error( + 'Can\'t decode string \'%s\' with \'%s\' codec at \'%s\'' % (self._value, self._encoding, self.__class__.__name__) + ) + + def __bytes__(self): + return bytes(self._value) + + def asOctets(self): + return bytes(self._value) + + def asNumbers(self): + if self.__asNumbersCache is None: + self.__asNumbersCache = tuple(self._value) + return self.__asNumbersCache - def fromBinaryString(self, value): - bitNo = 8; byte = 0; r = () + def prettyOut(self, value): + if sys.version_info[0] <= 2: + numbers = tuple((ord(x) for x in value)) + else: + numbers = tuple(value) + for x in numbers: + if x < 32 or x > 126: + return '0x' + ''.join(('%.2x' % x for x in numbers)) + else: + return octets.octs2str(value) + + @staticmethod + def fromBinaryString(value): + bitNo = 8 + byte = 0 + r = [] for v in value: if bitNo: - bitNo = bitNo - 1 + bitNo -= 1 else: bitNo = 7 - r = r + (byte,) + r.append(byte) byte = 0 - if v == '0': - v = 0 - elif v == '1': - v = 1 + if v in ('0', '1'): + v = int(v) else: raise error.PyAsn1Error( 'Non-binary OCTET STRING initializer %s' % (v,) - ) - byte = byte | (v << bitNo) - return octets.ints2octs(r + (byte,)) - - def fromHexString(self, value): - r = p = () + ) + byte |= v << bitNo + + r.append(byte) + + return octets.ints2octs(r) + + @staticmethod + def fromHexString(value): + r = [] + p = [] for v in value: if p: - r = r + (int(p+v, 16),) - p = () + r.append(int(p + v, 16)) + p = None else: p = v if p: - r = r + (int(p+'0', 16),) - return octets.ints2octs(r) + r.append(int(p + '0', 16)) - def prettyOut(self, value): - if sys.version_info[0] <= 2: - numbers = tuple([ ord(x) for x in value ]) - else: - numbers = tuple(value) - if [ x for x in numbers if x < 32 or x > 126 ]: - return '0x' + ''.join([ '%.2x' % x for x in 
numbers ]) - else: - return str(value) + return octets.ints2octs(r) def __repr__(self): - if self._value is base.noValue: - return self.__class__.__name__ + '()' - if [ x for x in self.asNumbers() if x < 32 or x > 126 ]: - return self.__class__.__name__ + '(hexValue=\'' + ''.join([ '%.2x' % x for x in self.asNumbers() ])+'\')' - else: - return self.__class__.__name__ + '(\'' + self.prettyOut(self._value) + '\')' - - if sys.version_info[0] <= 2: - def __str__(self): return str(self._value) - def __unicode__(self): - return self._value.decode(self._encoding, 'ignore') - def asOctets(self): return self._value - def asNumbers(self): - if self.__intValue is None: - self.__intValue = tuple([ ord(x) for x in self._value ]) - return self.__intValue - else: - def __str__(self): return self._value.decode(self._encoding, 'ignore') - def __bytes__(self): return self._value - def asOctets(self): return self._value - def asNumbers(self): - if self.__intValue is None: - self.__intValue = tuple(self._value) - return self.__intValue - + r = [] + doHex = False + if self._value is not self.defaultValue: + for x in self.asNumbers(): + if x < 32 or x > 126: + doHex = True + break + if not doHex: + r.append('%r' % (self._value,)) + if self._tagSet is not self.__class__.tagSet: + r.append('tagSet=%r' % (self._tagSet,)) + if self._subtypeSpec is not self.subtypeSpec: + r.append('subtypeSpec=%r' % (self._subtypeSpec,)) + if self.encoding is not self._encoding: + r.append('encoding=%r' % (self._encoding,)) + if doHex: + r.append('hexValue=%r' % ''.join(['%.2x' % x for x in self.asNumbers()])) + return '%s(%s)' % (self.__class__.__name__, ', '.join(r)) + # Immutable sequence object protocol - + def __len__(self): if self._len is None: self._len = len(self._value) return self._len + def __getitem__(self, i): - if isinstance(i, slice): - return self.clone(operator.getitem(self._value, i)) + if i.__class__ is slice: + return self.clone(self._value[i]) else: return self._value[i] - def 
__add__(self, value): return self.clone(self._value + self.prettyIn(value)) - def __radd__(self, value): return self.clone(self.prettyIn(value) + self._value) - def __mul__(self, value): return self.clone(self._value * value) - def __rmul__(self, value): return self * value + def __iter__(self): + return iter(self._value) + + def __contains__(self, value): + return value in self._value + + def __add__(self, value): + return self.clone(self._value + self.prettyIn(value)) + + def __radd__(self, value): + return self.clone(self.prettyIn(value) + self._value) + + def __mul__(self, value): + return self.clone(self._value * value) + + def __rmul__(self, value): + return self * value + + def __int__(self): + return int(self._value) + + def __float__(self): + return float(self._value) + + def __reversed__(self): + return reversed(self._value) + class Null(OctetString): + """Create |ASN.1| type or object. + + |ASN.1| objects are immutable and duck-type Python :class:`str` objects (always empty). + + Parameters + ---------- + value : :class:`str` or :py:class:`~pyasn1.type.univ.Null` object + Python empty string literal or *Null* class instance. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + Raises + ------ + : :py:class:`pyasn1.error.PyAsn1Error` + On constraint violation or bad initializer. + """ defaultValue = ''.encode() # This is tightly constrained - tagSet = baseTagSet = tag.initTagSet( + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. 
+ tagSet = tag.initTagSet( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x05) - ) - subtypeSpec = OctetString.subtypeSpec+constraint.SingleValueConstraint(''.encode()) - + ) + subtypeSpec = OctetString.subtypeSpec + constraint.SingleValueConstraint(octets.str2octs('')) + + # Optimization for faster codec lookup + typeId = OctetString.getTypeId() + + def clone(self, value=noValue, tagSet=None): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *clone()* method will replace corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`str` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing ASN.1 tag(s) to use in new object instead of inheriting from the caller + + Returns + ------- + : :py:class:`~pyasn1.type.univ.Null` + new instance of NULL type/value + """ + return OctetString.clone(self, value, tagSet) + + def subtype(self, value=noValue, implicitTag=None, explicitTag=None): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *subtype()* method will be added to the corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`int`, :class:`str` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + implicitTag: :py:class:`~pyasn1.type.tag.Tag` + Implicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + explicitTag: :py:class:`~pyasn1.type.tag.Tag` + Explicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). 
+ + Returns + ------- + : :py:class:`~pyasn1.type.univ.Null` + new instance of NULL type/value + """ + return OctetString.subtype(self, value, implicitTag, explicitTag) + + if sys.version_info[0] <= 2: intTypes = (int, long) else: - intTypes = int + intTypes = (int,) + +numericTypes = intTypes + (float,) + class ObjectIdentifier(base.AbstractSimpleAsn1Item): - tagSet = baseTagSet = tag.initTagSet( + """Create |ASN.1| type or object. + + |ASN.1| objects are immutable and duck-type Python :class:`tuple` objects (tuple of non-negative integers). + + Parameters + ---------- + value: :class:`tuple`, :class:`str` or |ASN.1| object + Python sequence of :class:`int` or string literal or |ASN.1| object. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + Raises + ------ + : :py:class:`pyasn1.error.PyAsn1Error` + On constraint violation or bad initializer. + """ + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x06) - ) - def __add__(self, other): return self.clone(self._value + other) - def __radd__(self, other): return self.clone(other + self._value) + ) + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. 
+ subtypeSpec = constraint.ConstraintsIntersection() + + # Optimization for faster codec lookup + typeId = base.AbstractSimpleAsn1Item.getTypeId() + + def __add__(self, other): + return self.clone(self._value + other) + + def __radd__(self, other): + return self.clone(other + self._value) + + def asTuple(self): + return self._value - def asTuple(self): return self._value - # Sequence object protocol - + def __len__(self): if self._len is None: self._len = len(self._value) return self._len + def __getitem__(self, i): - if isinstance(i, slice): - return self.clone( - operator.getitem(self._value, i) - ) + if i.__class__ is slice: + return self.clone(self._value[i]) else: return self._value[i] - def __str__(self): return self.prettyPrint() - - def index(self, suboid): return self._value.index(suboid) + def __iter__(self): + return iter(self._value) + + def __contains__(self, value): + return value in self._value + + def __str__(self): + return self.prettyPrint() - def isPrefixOf(self, value): - """Returns true if argument OID resides deeper in the OID tree""" + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, self.prettyPrint()) + + def index(self, suboid): + return self._value.index(suboid) + + def isPrefixOf(self, other): + """Indicate if this |ASN.1| object is a prefix of other |ASN.1| object. + + Parameters + ---------- + other: |ASN.1| object + |ASN.1| object + + Returns + ------- + : :class:`bool` + :class:`True` if this |ASN.1| object is a parent (e.g. prefix) of the other |ASN.1| object + or :class:`False` otherwise. 
+ """ l = len(self) - if l <= len(value): - if self._value[:l] == value[:l]: - return 1 - return 0 + if l <= len(other): + if self._value[:l] == other[:l]: + return True + return False def prettyIn(self, value): - """Dotted -> tuple of numerics OID converter""" - if isinstance(value, tuple): - pass - elif isinstance(value, ObjectIdentifier): - return tuple(value) - elif isinstance(value, str): - r = [] - for element in [ x for x in value.split('.') if x != '' ]: - try: - r.append(int(element, 0)) - except ValueError: - raise error.PyAsn1Error( - 'Malformed Object ID %s at %s: %s' % - (str(value), self.__class__.__name__, sys.exc_info()[1]) - ) - value = tuple(r) - else: + if isinstance(value, ObjectIdentifier): + return tuple(value) + elif octets.isStringType(value): + if '-' in value: + raise error.PyAsn1Error( + 'Malformed Object ID %s at %s: %s' % (value, self.__class__.__name__, sys.exc_info()[1]) + ) try: - value = tuple(value) - except TypeError: + return tuple([int(subOid) for subOid in value.split('.') if subOid]) + except ValueError: raise error.PyAsn1Error( - 'Malformed Object ID %s at %s: %s' % - (str(value), self.__class__.__name__,sys.exc_info()[1]) - ) + 'Malformed Object ID %s at %s: %s' % (value, self.__class__.__name__, sys.exc_info()[1]) + ) + + try: + tupleOfInts = tuple([int(subOid) for subOid in value if subOid >= 0]) + + except (ValueError, TypeError): + raise error.PyAsn1Error( + 'Malformed Object ID %s at %s: %s' % (value, self.__class__.__name__, sys.exc_info()[1]) + ) + + if len(tupleOfInts) == len(value): + return tupleOfInts + + raise error.PyAsn1Error('Malformed Object ID %s at %s' % (value, self.__class__.__name__)) + + def prettyOut(self, value): + return '.'.join([str(x) for x in value]) - for x in value: - if not isinstance(x, intTypes) or x < 0: - raise error.PyAsn1Error( - 'Invalid sub-ID in %s at %s' % (value, self.__class__.__name__) - ) - - return value - def prettyOut(self, value): return '.'.join([ str(x) for x in value ]) - 
class Real(base.AbstractSimpleAsn1Item): + """Create |ASN.1| type or object. + + |ASN.1| objects are immutable and duck-type Python :class:`float` objects. + Additionally, |ASN.1| objects behave like a :class:`tuple` in which case its + elements are mantissa, base and exponent. + + Parameters + ---------- + value: :class:`tuple`, :class:`float` or |ASN.1| object + Python sequence of :class:`int` (representing mantissa, base and + exponent) or float instance or *Real* class instance. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + Raises + ------ + : :py:class:`pyasn1.error.PyAsn1Error` + On constraint violation or bad initializer. + + """ + binEncBase = None # binEncBase = 16 is recommended for large numbers + try: _plusInf = float('inf') _minusInf = float('-inf') @@ -513,382 +1461,1062 @@ class Real(base.AbstractSimpleAsn1Item): _plusInf = _minusInf = None _inf = () - tagSet = baseTagSet = tag.initTagSet( + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x09) - ) - - def __normalizeBase10(self, value): + ) + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + # Optimization for faster codec lookup + typeId = base.AbstractSimpleAsn1Item.getTypeId() + + def clone(self, value=noValue, tagSet=None, subtypeSpec=None): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *clone()* method will replace corresponding + properties of the |ASN.1| object. 
+ + Parameters + ---------- + value: :class:`tuple`, :class:`float` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing ASN.1 tag(s) to use in new object instead of inheriting from the caller + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing ASN.1 subtype constraint(s) to use in new object instead of inheriting from the caller + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + return base.AbstractSimpleAsn1Item.clone(self, value, tagSet, subtypeSpec) + + def subtype(self, value=noValue, implicitTag=None, explicitTag=None, + subtypeSpec=None): + """Create a copy of a |ASN.1| type or object. + + Any parameters to the *subtype()* method will be added to the corresponding + properties of the |ASN.1| object. + + Parameters + ---------- + value: :class:`tuple`, :class:`float` or |ASN.1| object + Initialization value to pass to new ASN.1 object instead of + inheriting one from the caller. + + implicitTag: :py:class:`~pyasn1.type.tag.Tag` + Implicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). + + explicitTag: :py:class:`~pyasn1.type.tag.Tag` + Explicitly apply given ASN.1 tag object to caller's + :py:class:`~pyasn1.type.tag.TagSet`, then use the result as + new object's ASN.1 tag(s). 
+ + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing ASN.1 subtype constraint(s) to use in new object instead of inheriting from the caller + + Returns + ------- + : + new instance of |ASN.1| type/value + """ + return base.AbstractSimpleAsn1Item.subtype(self, value, implicitTag, explicitTag) + + @staticmethod + def __normalizeBase10(value): m, b, e = value while m and m % 10 == 0: - m = m / 10 - e = e + 1 + m /= 10 + e += 1 return m, b, e def prettyIn(self, value): if isinstance(value, tuple) and len(value) == 3: - for d in value: - if not isinstance(d, intTypes): - raise error.PyAsn1Error( - 'Lame Real value syntax: %s' % (value,) - ) + if not isinstance(value[0], numericTypes) or \ + not isinstance(value[1], intTypes) or \ + not isinstance(value[2], intTypes): + raise error.PyAsn1Error('Lame Real value syntax: %s' % (value,)) + if isinstance(value[0], float) and \ + self._inf and value[0] in self._inf: + return value[0] if value[1] not in (2, 10): raise error.PyAsn1Error( 'Prohibited base for Real value: %s' % (value[1],) - ) + ) if value[1] == 10: value = self.__normalizeBase10(value) return value elif isinstance(value, intTypes): return self.__normalizeBase10((value, 10, 0)) - elif isinstance(value, float): + elif isinstance(value, float) or octets.isStringType(value): + if octets.isStringType(value): + try: + value = float(value) + except ValueError: + raise error.PyAsn1Error( + 'Bad real value syntax: %s' % (value,) + ) if self._inf and value in self._inf: return value else: e = 0 while int(value) != value: - value = value * 10 - e = e - 1 + value *= 10 + e -= 1 return self.__normalizeBase10((int(value), 10, e)) elif isinstance(value, Real): return tuple(value) - elif isinstance(value, str): # handle infinite literal - try: - return float(value) - except ValueError: - pass raise error.PyAsn1Error( 'Bad real value syntax: %s' % (value,) - ) - + ) + def prettyOut(self, value): if value in self._inf: return '\'%s\'' 
% value else: return str(value) - def isPlusInfinity(self): return self._value == self._plusInf - def isMinusInfinity(self): return self._value == self._minusInf - def isInfinity(self): return self._value in self._inf - - def __str__(self): return str(float(self)) - - def __add__(self, value): return self.clone(float(self) + value) - def __radd__(self, value): return self + value - def __mul__(self, value): return self.clone(float(self) * value) - def __rmul__(self, value): return self * value - def __sub__(self, value): return self.clone(float(self) - value) - def __rsub__(self, value): return self.clone(value - float(self)) - def __mod__(self, value): return self.clone(float(self) % value) - def __rmod__(self, value): return self.clone(value % float(self)) - def __pow__(self, value, modulo=None): return self.clone(pow(float(self), value, modulo)) - def __rpow__(self, value): return self.clone(pow(value, float(self))) + def prettyPrint(self, scope=0): + if self.isInfinity(): + return self.prettyOut(self._value) + else: + return str(float(self)) + + def isPlusInfinity(self): + """Indicate PLUS-INFINITY object value + + Returns + ------- + : :class:`bool` + :class:`True` if calling object represents plus infinity + or :class:`False` otherwise. + + """ + return self._value == self._plusInf + + def isMinusInfinity(self): + """Indicate MINUS-INFINITY object value + + Returns + ------- + : :class:`bool` + :class:`True` if calling object represents minus infinity + or :class:`False` otherwise. 
+ """ + return self._value == self._minusInf + + def isInfinity(self): + return self._value in self._inf + + def __str__(self): + return str(float(self)) + + def __add__(self, value): + return self.clone(float(self) + value) + + def __radd__(self, value): + return self + value + + def __mul__(self, value): + return self.clone(float(self) * value) + + def __rmul__(self, value): + return self * value + + def __sub__(self, value): + return self.clone(float(self) - value) + + def __rsub__(self, value): + return self.clone(value - float(self)) + + def __mod__(self, value): + return self.clone(float(self) % value) + + def __rmod__(self, value): + return self.clone(value % float(self)) + + def __pow__(self, value, modulo=None): + return self.clone(pow(float(self), value, modulo)) + + def __rpow__(self, value): + return self.clone(pow(value, float(self))) if sys.version_info[0] <= 2: - def __div__(self, value): return self.clone(float(self) / value) - def __rdiv__(self, value): return self.clone(value / float(self)) + def __div__(self, value): + return self.clone(float(self) / value) + + def __rdiv__(self, value): + return self.clone(value / float(self)) else: - def __truediv__(self, value): return self.clone(float(self) / value) - def __rtruediv__(self, value): return self.clone(value / float(self)) - def __divmod__(self, value): return self.clone(float(self) // value) - def __rdivmod__(self, value): return self.clone(value // float(self)) + def __truediv__(self, value): + return self.clone(float(self) / value) + + def __rtruediv__(self, value): + return self.clone(value / float(self)) + + def __divmod__(self, value): + return self.clone(float(self) // value) + + def __rdivmod__(self, value): + return self.clone(value // float(self)) + + def __int__(self): + return int(float(self)) - def __int__(self): return int(float(self)) if sys.version_info[0] <= 2: def __long__(self): return long(float(self)) + def __float__(self): if self._value in self._inf: return self._value 
else: return float( self._value[0] * pow(self._value[1], self._value[2]) - ) - def __abs__(self): return abs(float(self)) + ) + + def __abs__(self): + return self.clone(abs(float(self))) + + def __pos__(self): + return self.clone(+float(self)) + + def __neg__(self): + return self.clone(-float(self)) + + def __round__(self, n=0): + r = round(float(self), n) + if n: + return self.clone(r) + else: + return r + + def __floor__(self): + return self.clone(math.floor(float(self))) + + def __ceil__(self): + return self.clone(math.ceil(float(self))) + + if sys.version_info[0:2] > (2, 5): + def __trunc__(self): return self.clone(math.trunc(float(self))) + + def __lt__(self, value): + return float(self) < value + + def __le__(self, value): + return float(self) <= value + + def __eq__(self, value): + return float(self) == value + + def __ne__(self, value): + return float(self) != value + + def __gt__(self, value): + return float(self) > value + + def __ge__(self, value): + return float(self) >= value + + if sys.version_info[0] <= 2: + def __nonzero__(self): + return bool(float(self)) + else: + def __bool__(self): + return bool(float(self)) + + __hash__ = base.AbstractSimpleAsn1Item.__hash__ + + def __getitem__(self, idx): + if self._value in self._inf: + raise error.PyAsn1Error('Invalid infinite value operation') + else: + return self._value[idx] + + +class Enumerated(Integer): + __doc__ = Integer.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x0A) + ) + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. 
+ subtypeSpec = constraint.ConstraintsIntersection() + + # Optimization for faster codec lookup + typeId = Integer.getTypeId() + + #: Default :py:class:`~pyasn1.type.namedval.NamedValues` object + #: representing symbolic aliases for numbers + namedValues = namedval.NamedValues() + + +# "Structured" ASN.1 types + +class SequenceOfAndSetOfBase(base.AbstractConstructedAsn1Item): + """Create |ASN.1| type. + + |ASN.1| objects are mutable and duck-type Python :class:`list` objects. + + Parameters + ---------- + componentType : :py:class:`~pyasn1.type.base.PyAsn1Item` derivative + A pyasn1 object representing ASN.1 type allowed within |ASN.1| type + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + sizeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing collection size constraint + """ + + # Python list protocol + + def clear(self): + self._componentValues = [] + + def append(self, value): + self[len(self)] = value - def __lt__(self, value): return float(self) < value - def __le__(self, value): return float(self) <= value - def __eq__(self, value): return float(self) == value - def __ne__(self, value): return float(self) != value - def __gt__(self, value): return float(self) > value - def __ge__(self, value): return float(self) >= value + def count(self, value): + return self._componentValues.count(value) - if sys.version_info[0] <= 2: - def __nonzero__(self): return bool(float(self)) - else: - def __bool__(self): return bool(float(self)) - __hash__ = base.AbstractSimpleAsn1Item.__hash__ + def extend(self, values): + for value in values: + self.append(value) - def __getitem__(self, idx): - if self._value in self._inf: - raise error.PyAsn1Error('Invalid infinite value operation') - else: - return self._value[idx] - -class Enumerated(Integer): - tagSet = 
baseTagSet = tag.initTagSet( - tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x0A) - ) + def index(self, value, start=0, stop=None): + if stop is None: + stop = len(self) + return self._componentValues.index(value, start, stop) -# "Structured" ASN.1 types + def reverse(self): + self._componentValues.reverse() -class SetOf(base.AbstractConstructedAsn1Item): - componentType = None - tagSet = baseTagSet = tag.initTagSet( - tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11) - ) - typeId = 1 + def sort(self, key=None, reverse=False): + self._componentValues.sort(key=key, reverse=reverse) + + def __iter__(self): + return iter(self._componentValues) def _cloneComponentValues(self, myClone, cloneValueFlag): - idx = 0; l = len(self._componentValues) - while idx < l: - c = self._componentValues[idx] - if c is not None: - if isinstance(c, base.AbstractConstructedAsn1Item): + for idx, componentValue in enumerate(self._componentValues): + if componentValue is not None: + if isinstance(componentValue, base.AbstractConstructedAsn1Item): myClone.setComponentByPosition( - idx, c.clone(cloneValueFlag=cloneValueFlag) - ) + idx, componentValue.clone(cloneValueFlag=cloneValueFlag) + ) else: - myClone.setComponentByPosition(idx, c.clone()) - idx = idx + 1 - - def _verifyComponent(self, idx, value): - if self._componentType is not None and \ - not self._componentType.isSuperTypeOf(value): - raise error.PyAsn1Error('Component type error %s' % (value,)) - - def getComponentByPosition(self, idx): return self._componentValues[idx] - def setComponentByPosition(self, idx, value=None, verifyConstraints=True): - l = len(self._componentValues) - if idx >= l: - self._componentValues = self._componentValues + (idx-l+1)*[None] - if value is None: - if self._componentValues[idx] is None: - if self._componentType is None: - raise error.PyAsn1Error('Component type not defined') - self._componentValues[idx] = self._componentType.clone() - self._componentValuesSet = 
self._componentValuesSet + 1 - return self - elif not isinstance(value, base.Asn1Item): - if self._componentType is None: + myClone.setComponentByPosition(idx, componentValue.clone()) + + def getComponentByPosition(self, idx): + """Return |ASN.1| type component value by position. + + Equivalent to Python sequence subscription operation (e.g. `[]`). + + Parameters + ---------- + idx : :class:`int` + Component index (zero-based). Must either refer to an existing + component or to N+1 component (of *componentType is set). In the latter + case a new component type gets instantiated and appended to the |ASN.1| + sequence. + + Returns + ------- + : :py:class:`~pyasn1.type.base.PyAsn1Item` + a pyasn1 object + """ + try: + return self._componentValues[idx] + except IndexError: + self.setComponentByPosition(idx) + return self._componentValues[idx] + + def setComponentByPosition(self, idx, value=noValue, + verifyConstraints=True, + matchTags=True, + matchConstraints=True): + """Assign |ASN.1| type component by position. + + Equivalent to Python sequence item assignment operation (e.g. `[]`) + or list.append() (when idx == len(self)). + + Parameters + ---------- + idx: :class:`int` + Component index (zero-based). Must either refer to existing + component or to N+1 component. In the latter case a new component + type gets instantiated (if *componentType* is set, or given ASN.1 + object is taken otherwise) and appended to the |ASN.1| sequence. + + value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative + A Python value to initialize |ASN.1| component with (if *componentType* is set) + or ASN.1 value object to assign to |ASN.1| component. 
+ + verifyConstraints: :class:`bool` + If `False`, skip constraints validation + + matchTags: :class:`bool` + If `False`, skip component tags matching + + matchConstraints: :class:`bool` + If `False`, skip component constraints matching + + Returns + ------- + self + + Raises + ------ + IndexError: + When idx > len(self) + """ + componentType = self._componentType + + try: + currentValue = self._componentValues[idx] + except IndexError: + currentValue = None + + if len(self._componentValues) < idx: + raise error.PyAsn1Error('Component index out of range') + + if value is None or value is noValue: + if componentType is not None: + value = componentType.clone() + elif currentValue is None: raise error.PyAsn1Error('Component type not defined') - if isinstance(self._componentType, base.AbstractSimpleAsn1Item): - value = self._componentType.clone(value=value) + elif not isinstance(value, base.Asn1Item): + if componentType is not None and isinstance(componentType, base.AbstractSimpleAsn1Item): + value = componentType.clone(value=value) + elif currentValue is not None and isinstance(currentValue, base.AbstractSimpleAsn1Item): + value = currentValue.clone(value=value) else: - raise error.PyAsn1Error('Instance value required') - if verifyConstraints: - if self._componentType is not None: - self._verifyComponent(idx, value) - self._verifySubtypeSpec(value, idx) - if self._componentValues[idx] is None: - self._componentValuesSet = self._componentValuesSet + 1 - self._componentValues[idx] = value + raise error.PyAsn1Error('%s undefined component type' % componentType.__class__.__name__) + elif componentType is not None: + if self.strictConstraints: + if not componentType.isSameTypeWith(value, matchTags, matchConstraints): + raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType)) + else: + if not componentType.isSuperTypeOf(value, matchTags, matchConstraints): + raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % 
(value, componentType)) + + if verifyConstraints and value.isValue: + try: + self._subtypeSpec(value, idx) + + except error.PyAsn1Error: + exType, exValue, exTb = sys.exc_info() + raise exType('%s at %s' % (exValue, self.__class__.__name__)) + + if currentValue is None: + self._componentValues.append(value) + else: + self._componentValues[idx] = value + return self - def getComponentTagMap(self): + @property + def componentTagMap(self): if self._componentType is not None: - return self._componentType.getTagMap() + return self._componentType.tagMap def prettyPrint(self, scope=0): - scope = scope + 1 - r = self.__class__.__name__ + ':\n' + scope += 1 + representation = self.__class__.__name__ + ':\n' for idx in range(len(self._componentValues)): - r = r + ' '*scope + representation += ' ' * scope if self._componentValues[idx] is None: - r = r + '' + representation += '' else: - r = r + self._componentValues[idx].prettyPrint(scope) - return r + representation += self._componentValues[idx].prettyPrint(scope) + return representation + + def prettyPrintType(self, scope=0): + scope += 1 + representation = '%s -> %s {\n' % (self.tagSet, self.__class__.__name__) + if self._componentType is not None: + representation += ' ' * scope + representation += self._componentType.prettyPrintType(scope) + return representation + '\n' + ' ' * (scope - 1) + '}' + + + @property + def isValue(self): + """Indicate if |ASN.1| object components represent ASN.1 type or ASN.1 value. + + The PyASN1 type objects can only participate in types comparison + and serve as a blueprint for serialization codecs to resolve + ambiguous types. + + The PyASN1 value objects can additionally participate in most + of built-in Python operations. + + Returns + ------- + : :class:`bool` + :class:`True` if all |ASN.1| components represent value and type, + :class:`False` if at least one |ASN.1| component represents just ASN.1 type. 
+ """ + if not self._componentValues: + return False -class SequenceOf(SetOf): - tagSet = baseTagSet = tag.initTagSet( + for componentValue in self._componentValues: + if not componentValue.isValue: + return False + + return True + + +class SequenceOf(SequenceOfAndSetOfBase): + __doc__ = SequenceOfAndSetOfBase.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10) - ) - typeId = 2 + ) + + #: Default :py:class:`~pyasn1.type.base.PyAsn1Item` derivative + #: object representing ASN.1 type allowed within |ASN.1| type + componentType = None + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + #: object imposing size constraint on |ASN.1| objects + sizeSpec = constraint.ConstraintsIntersection() + + # Disambiguation ASN.1 types identification + typeId = SequenceOfAndSetOfBase.getTypeId() + + +class SetOf(SequenceOfAndSetOfBase): + __doc__ = SequenceOfAndSetOfBase.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. 
+ tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11) + ) + + #: Default :py:class:`~pyasn1.type.base.PyAsn1Item` derivative + #: object representing ASN.1 type allowed within |ASN.1| type + componentType = None + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + #: object imposing size constraint on |ASN.1| objects + sizeSpec = constraint.ConstraintsIntersection() + + # Disambiguation ASN.1 types identification + typeId = SequenceOfAndSetOfBase.getTypeId() + class SequenceAndSetBase(base.AbstractConstructedAsn1Item): + """Create |ASN.1| type. + + |ASN.1| objects are mutable and duck-type Python :class:`dict` objects. + + Parameters + ---------- + componentType: :py:class:`~pyasn1.type.namedtype.NamedType` + Object holding named ASN.1 types allowed within this collection + + tagSet: :py:class:`~pyasn1.type.tag.TagSet` + Object representing non-default ASN.1 tag(s) + + subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing non-default ASN.1 subtype constraint(s) + + sizeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + Object representing collection size constraint + """ + #: Default :py:class:`~pyasn1.type.namedtype.NamedTypes` + #: object representing named ASN.1 types allowed within |ASN.1| type componentType = namedtype.NamedTypes() + def __init__(self, componentType=None, tagSet=None, subtypeSpec=None, sizeSpec=None): + if componentType is None: + componentType = self.componentType base.AbstractConstructedAsn1Item.__init__( self, componentType, tagSet, subtypeSpec, sizeSpec - ) - if self._componentType is None: - self._componentTypeLen = 0 - else: - self._componentTypeLen = 
len(self._componentType) + ) + self._componentTypeLen = len(self._componentType) def __getitem__(self, idx): - if isinstance(idx, str): + if octets.isStringType(idx): return self.getComponentByName(idx) else: return base.AbstractConstructedAsn1Item.__getitem__(self, idx) def __setitem__(self, idx, value): - if isinstance(idx, str): + if octets.isStringType(idx): self.setComponentByName(idx, value) else: base.AbstractConstructedAsn1Item.__setitem__(self, idx, value) - + + def __contains__(self, key): + return key in self._componentType + + def __iter__(self): + return iter(self._componentType) + + # Python dict protocol + + def values(self): + for idx in range(self._componentTypeLen): + yield self[idx] + + def keys(self): + return iter(self._componentType) + + def items(self): + for idx in range(self._componentTypeLen): + yield self._componentType[idx].getName(), self[idx] + + def update(self, *iterValue, **mappingValue): + for k, v in iterValue: + self[k] = v + for k in mappingValue: + self[k] = mappingValue[k] + + def clear(self): + self._componentValues = [] + def _cloneComponentValues(self, myClone, cloneValueFlag): - idx = 0; l = len(self._componentValues) - while idx < l: - c = self._componentValues[idx] - if c is not None: - if isinstance(c, base.AbstractConstructedAsn1Item): + for idx, componentValue in enumerate(self._componentValues): + if componentValue is not None: + if isinstance(componentValue, base.AbstractConstructedAsn1Item): myClone.setComponentByPosition( - idx, c.clone(cloneValueFlag=cloneValueFlag) - ) + idx, componentValue.clone(cloneValueFlag=cloneValueFlag) + ) else: - myClone.setComponentByPosition(idx, c.clone()) - idx = idx + 1 - - def _verifyComponent(self, idx, value): - if idx >= self._componentTypeLen: - raise error.PyAsn1Error( - 'Component type error out of range' - ) - t = self._componentType[idx].getType() - if not t.isSuperTypeOf(value): - raise error.PyAsn1Error('Component type error %r vs %r' % (t, value)) + 
myClone.setComponentByPosition(idx, componentValue.clone()) def getComponentByName(self, name): + """Returns |ASN.1| type component by name. + + Equivalent to Python :class:`dict` subscription operation (e.g. `[]`). + + Parameters + ---------- + name : :class:`str` + |ASN.1| type component name + + Returns + ------- + : :py:class:`~pyasn1.type.base.PyAsn1Item` + Instantiate |ASN.1| component type or return existing component value + """ return self.getComponentByPosition( self._componentType.getPositionByName(name) - ) - def setComponentByName(self, name, value=None, verifyConstraints=True): + ) + + def setComponentByName(self, name, value=noValue, + verifyConstraints=True, + matchTags=True, + matchConstraints=True): + """Assign |ASN.1| type component by name. + + Equivalent to Python :class:`dict` item assignment operation (e.g. `[]`). + + Parameters + ---------- + name: :class:`str` + |ASN.1| type component name + + value : :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative + A Python value to initialize |ASN.1| component with (if *componentType* is set) + or ASN.1 value object to assign to |ASN.1| component. + + verifyConstraints: :class:`bool` + If `False`, skip constraints validation + + matchTags: :class:`bool` + If `False`, skip component tags matching + + matchConstraints: :class:`bool` + If `False`, skip component constraints matching + + Returns + ------- + self + """ return self.setComponentByPosition( - self._componentType.getPositionByName(name), value, - verifyConstraints - ) + self._componentType.getPositionByName(name), value, verifyConstraints, matchTags, matchConstraints + ) def getComponentByPosition(self, idx): + """Returns |ASN.1| type component by index. + + Equivalent to Python sequence subscription operation (e.g. `[]`). + + Parameters + ---------- + idx : :class:`int` + Component index (zero-based). Must either refer to an existing + component or (if *componentType* is set) new ASN.1 type object gets + instantiated. 
+ + Returns + ------- + : :py:class:`~pyasn1.type.base.PyAsn1Item` + a PyASN1 object + """ try: - return self._componentValues[idx] + componentValue = self._componentValues[idx] except IndexError: - if idx < self._componentTypeLen: - return - raise - def setComponentByPosition(self, idx, value=None, verifyConstraints=True): - l = len(self._componentValues) - if idx >= l: - self._componentValues = self._componentValues + (idx-l+1)*[None] - if value is None: - if self._componentValues[idx] is None: - self._componentValues[idx] = self._componentType.getTypeByPosition(idx).clone() - self._componentValuesSet = self._componentValuesSet + 1 - return self + componentValue = None + + if componentValue is None: + self.setComponentByPosition(idx) + + return self._componentValues[idx] + + def setComponentByPosition(self, idx, value=noValue, + verifyConstraints=True, + matchTags=True, + matchConstraints=True): + """Assign |ASN.1| type component by position. + + Equivalent to Python sequence item assignment operation (e.g. `[]`). + + Parameters + ---------- + idx : :class:`int` + Component index (zero-based). Must either refer to existing + component (if *componentType* is set) or to N+1 component + otherwise. In the latter case a new component of given ASN.1 + type gets instantiated and appended to |ASN.1| sequence. + + value : :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative + A Python value to initialize |ASN.1| component with (if *componentType* is set) + or ASN.1 value object to assign to |ASN.1| component. 
+ + verifyConstraints : :class:`bool` + If `False`, skip constraints validation + + matchTags: :class:`bool` + If `False`, skip component tags matching + + matchConstraints: :class:`bool` + If `False`, skip component constraints matching + + Returns + ------- + self + """ + componentType = self._componentType + componentTypeLen = self._componentTypeLen + + try: + currentValue = self._componentValues[idx] + except IndexError: + currentValue = None + if componentTypeLen: + if componentTypeLen < idx: + raise IndexError('component index out of range') + self._componentValues = [None] * componentTypeLen + + if value is None or value is noValue: + if componentTypeLen: + value = componentType.getTypeByPosition(idx).clone() + elif currentValue is None: + raise error.PyAsn1Error('Component type not defined') elif not isinstance(value, base.Asn1Item): - t = self._componentType.getTypeByPosition(idx) - if isinstance(t, base.AbstractSimpleAsn1Item): - value = t.clone(value=value) + if componentTypeLen: + subComponentType = componentType.getTypeByPosition(idx) + if isinstance(subComponentType, base.AbstractSimpleAsn1Item): + value = subComponentType.clone(value=value) + else: + raise error.PyAsn1Error('%s can cast only scalar values' % componentType.__class__.__name__) + elif currentValue is not None and isinstance(currentValue, base.AbstractSimpleAsn1Item): + value = currentValue.clone(value=value) else: - raise error.PyAsn1Error('Instance value required') - if verifyConstraints: - if self._componentTypeLen: - self._verifyComponent(idx, value) - self._verifySubtypeSpec(value, idx) - if self._componentValues[idx] is None: - self._componentValuesSet = self._componentValuesSet + 1 - self._componentValues[idx] = value + raise error.PyAsn1Error('%s undefined component type' % componentType.__class__.__name__) + elif (matchTags or matchConstraints) and componentTypeLen: + subComponentType = componentType.getTypeByPosition(idx) + if subComponentType is not None: + if 
self.strictConstraints: + if not subComponentType.isSameTypeWith(value, matchTags, matchConstraints): + raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType)) + else: + if not subComponentType.isSuperTypeOf(value, matchTags, matchConstraints): + raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, componentType)) + + if verifyConstraints and value.isValue: + try: + self._subtypeSpec(value, idx) + + except error.PyAsn1Error: + exType, exValue, exTb = sys.exc_info() + raise exType('%s at %s' % (exValue, self.__class__.__name__)) + + if componentTypeLen: + self._componentValues[idx] = value + elif len(self._componentValues) == idx: + self._componentValues.append(value) + else: + raise error.PyAsn1Error('Component index out of range') + return self def getNameByPosition(self, idx): if self._componentTypeLen: return self._componentType.getNameByPosition(idx) - def getDefaultComponentByPosition(self, idx): - if self._componentTypeLen and self._componentType[idx].isDefaulted: - return self._componentType[idx].getType() - def getComponentType(self): if self._componentTypeLen: return self._componentType - - def setDefaultComponents(self): - if self._componentTypeLen == self._componentValuesSet: - return - idx = self._componentTypeLen - while idx: - idx = idx - 1 - if self._componentType[idx].isDefaulted: - if self.getComponentByPosition(idx) is None: - self.setComponentByPosition(idx) - elif not self._componentType[idx].isOptional: - if self.getComponentByPosition(idx) is None: - raise error.PyAsn1Error( - 'Uninitialized component #%s at %r' % (idx, self) - ) + + @property + def isValue(self): + """Indicate if |ASN.1| object components represent ASN.1 type or ASN.1 value. + + The PyASN1 type objects can only participate in types comparison + and serve as a blueprint for serialization codecs to resolve + ambiguous types. 
+ + The PyASN1 value objects can additionally participate in most + of built-in Python operations. + + Returns + ------- + : :class:`bool` + :class:`True` if all |ASN.1| components represent value and type, + :class:`False` if at least one |ASN.1| component represents just ASN.1 type. + """ + componentType = self._componentType + + if componentType: + for idx, subComponentType in enumerate(componentType.namedTypes): + if subComponentType.isDefaulted or subComponentType.isOptional: + continue + if not self._componentValues or self._componentValues[idx] is None or not self._componentValues[idx].isValue: + return False + + else: + for componentValue in self._componentValues: + if not componentValue.isValue: + return False + + return True def prettyPrint(self, scope=0): - scope = scope + 1 - r = self.__class__.__name__ + ':\n' + """Return an object representation string. + + Returns + ------- + : :class:`str` + Human-friendly object representation. + """ + scope += 1 + representation = self.__class__.__name__ + ':\n' for idx in range(len(self._componentValues)): if self._componentValues[idx] is not None: - r = r + ' '*scope + representation += ' ' * scope componentType = self.getComponentType() if componentType is None: - r = r + '' + representation += '' else: - r = r + componentType.getNameByPosition(idx) - r = '%s=%s\n' % ( - r, self._componentValues[idx].prettyPrint(scope) - ) - return r + representation += componentType.getNameByPosition(idx) + representation = '%s=%s\n' % ( + representation, self._componentValues[idx].prettyPrint(scope) + ) + return representation + + def prettyPrintType(self, scope=0): + scope += 1 + representation = '%s -> %s {\n' % (self.tagSet, self.__class__.__name__) + for idx in range(len(self.componentType)): + representation += ' ' * scope + representation += '"%s"' % self.componentType.getNameByPosition(idx) + representation = '%s = %s\n' % ( + representation, self._componentType.getTypeByPosition(idx).prettyPrintType(scope) + ) + 
return representation + '\n' + ' ' * (scope - 1) + '}' + + # backward compatibility -- no-op + def setDefaultComponents(self): + return self + class Sequence(SequenceAndSetBase): - tagSet = baseTagSet = tag.initTagSet( + __doc__ = SequenceAndSetBase.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.initTagSet( tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10) - ) - typeId = 3 + ) + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + #: object imposing constraints on |ASN.1| objects + sizeSpec = constraint.ConstraintsIntersection() + + #: Default collection of ASN.1 types of component (e.g. :py:class:`~pyasn1.type.namedtype.NamedType`) + #: object imposing size constraint on |ASN.1| objects + componentType = namedtype.NamedTypes() + + # Disambiguation ASN.1 types identification + typeId = SequenceAndSetBase.getTypeId() def getComponentTagMapNearPosition(self, idx): if self._componentType: return self._componentType.getTagMapNearPosition(idx) - + def getComponentPositionNearType(self, tagSet, idx): if self._componentType: return self._componentType.getPositionNearType(tagSet, idx) else: return idx - + + class Set(SequenceAndSetBase): - tagSet = baseTagSet = tag.initTagSet( + __doc__ = SequenceAndSetBase.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. 
+ tagSet = tag.initTagSet( tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11) - ) - typeId = 4 + ) + + #: Default collection of ASN.1 types of component (e.g. :py:class:`~pyasn1.type.namedtype.NamedType`) + #: object representing ASN.1 type allowed within |ASN.1| type + componentType = namedtype.NamedTypes() + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + #: object imposing constraints on |ASN.1| objects + sizeSpec = constraint.ConstraintsIntersection() + + # Disambiguation ASN.1 types identification + typeId = SequenceAndSetBase.getTypeId() + + def getComponent(self, innerFlag=False): + return self - def getComponent(self, innerFlag=0): return self - - def getComponentByType(self, tagSet, innerFlag=0): - c = self.getComponentByPosition( + def getComponentByType(self, tagSet, innerFlag=False): + """Returns |ASN.1| type component by ASN.1 tag. 
+ + Parameters + ---------- + tagSet : :py:class:`~pyasn1.type.tag.TagSet` + Object representing ASN.1 tags to identify one of + |ASN.1| object component + + Returns + ------- + : :py:class:`~pyasn1.type.base.PyAsn1Item` + a pyasn1 object + """ + component = self.getComponentByPosition( self._componentType.getPositionByType(tagSet) - ) - if innerFlag and isinstance(c, Set): + ) + if innerFlag and isinstance(component, Set): # get inner component by inner tagSet - return c.getComponent(1) + return component.getComponent(innerFlag=True) else: # get outer component by inner tagSet - return c - - def setComponentByType(self, tagSet, value=None, innerFlag=0, - verifyConstraints=True): + return component + + def setComponentByType(self, tagSet, value=noValue, + verifyConstraints=True, + matchTags=True, + matchConstraints=True, + innerFlag=False): + """Assign |ASN.1| type component by ASN.1 tag. + + Parameters + ---------- + tagSet : :py:class:`~pyasn1.type.tag.TagSet` + Object representing ASN.1 tags to identify one of + |ASN.1| object component + + value : :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative + A Python value to initialize |ASN.1| component with (if *componentType* is set) + or ASN.1 value object to assign to |ASN.1| component. + + verifyConstraints : :class:`bool` + If `False`, skip constraints validation + + matchTags: :class:`bool` + If `False`, skip component tags matching + + matchConstraints: :class:`bool` + If `False`, skip component constraints matching + + innerFlag: :class:`bool` + If `True`, search for matching *tagSet* recursively. 
+ + Returns + ------- + self + """ idx = self._componentType.getPositionByType(tagSet) - t = self._componentType.getTypeByPosition(idx) + if innerFlag: # set inner component by inner tagSet - if t.getTagSet(): + componentType = self._componentType.getTypeByPosition(idx) + + if componentType.tagSet: return self.setComponentByPosition( - idx, value, verifyConstraints - ) + idx, value, verifyConstraints, matchTags, matchConstraints + ) else: - t = self.setComponentByPosition(idx).getComponentByPosition(idx) - return t.setComponentByType( - tagSet, value, innerFlag, verifyConstraints - ) + componentType = self.getComponentByPosition(idx) + return componentType.setComponentByType( + tagSet, value, verifyConstraints, matchTags, matchConstraints, innerFlag=innerFlag + ) else: # set outer component by inner tagSet return self.setComponentByPosition( - idx, value, verifyConstraints - ) - - def getComponentTagMap(self): + idx, value, verifyConstraints, matchTags, matchConstraints + ) + + @property + def componentTagMap(self): if self._componentType: return self._componentType.getTagMap(True) @@ -896,116 +2524,202 @@ def getComponentPositionByType(self, tagSet): if self._componentType: return self._componentType.getPositionByType(tagSet) + class Choice(Set): - tagSet = baseTagSet = tag.TagSet() # untagged + __doc__ = Set.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.TagSet() # untagged + + #: Default collection of ASN.1 types of component (e.g. :py:class:`~pyasn1.type.namedtype.NamedType`) + #: object representing ASN.1 type allowed within |ASN.1| type + componentType = namedtype.NamedTypes() + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. 
+ subtypeSpec = constraint.ConstraintsIntersection() + + #: Default :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` + #: object imposing size constraint on |ASN.1| objects sizeSpec = constraint.ConstraintsIntersection( constraint.ValueSizeConstraint(1, 1) - ) - typeId = 5 + ) + + # Disambiguation ASN.1 types identification + typeId = Set.getTypeId() + _currentIdx = None def __eq__(self, other): if self._componentValues: return self._componentValues[self._currentIdx] == other return NotImplemented + def __ne__(self, other): if self._componentValues: return self._componentValues[self._currentIdx] != other return NotImplemented + def __lt__(self, other): if self._componentValues: return self._componentValues[self._currentIdx] < other return NotImplemented + def __le__(self, other): if self._componentValues: return self._componentValues[self._currentIdx] <= other return NotImplemented + def __gt__(self, other): if self._componentValues: return self._componentValues[self._currentIdx] > other return NotImplemented + def __ge__(self, other): if self._componentValues: return self._componentValues[self._currentIdx] >= other return NotImplemented + if sys.version_info[0] <= 2: - def __nonzero__(self): return bool(self._componentValues) + def __nonzero__(self): + return self._componentValues and True or False else: - def __bool__(self): return bool(self._componentValues) + def __bool__(self): + return self._componentValues and True or False + + def __len__(self): + return self._currentIdx is not None and 1 or 0 + + def __contains__(self, key): + if self._currentIdx is None: + return False + return key == self._componentType[self._currentIdx].getName() + + def __iter__(self): + if self._currentIdx is None: + raise StopIteration + yield self._componentType[self._currentIdx].getName() + + # Python dict protocol + + def values(self): + if self._currentIdx is not None: + yield self._componentValues[self._currentIdx] + + def keys(self): + if self._currentIdx is not 
None: + yield self._componentType[self._currentIdx].getName() + + def items(self): + if self._currentIdx is not None: + yield self._componentType[self._currentIdx].getName(), self[self._currentIdx] - def __len__(self): return self._currentIdx is not None and 1 or 0 - def verifySizeSpec(self): if self._currentIdx is None: raise error.PyAsn1Error('Component not chosen') - else: - self._sizeSpec(' ') def _cloneComponentValues(self, myClone, cloneValueFlag): try: - c = self.getComponent() + component = self.getComponent() except error.PyAsn1Error: pass else: - if isinstance(c, Choice): - tagSet = c.getEffectiveTagSet() + if isinstance(component, Choice): + tagSet = component.effectiveTagSet else: - tagSet = c.getTagSet() - if isinstance(c, base.AbstractConstructedAsn1Item): + tagSet = component.tagSet + if isinstance(component, base.AbstractConstructedAsn1Item): myClone.setComponentByType( - tagSet, c.clone(cloneValueFlag=cloneValueFlag) - ) + tagSet, component.clone(cloneValueFlag=cloneValueFlag) + ) else: - myClone.setComponentByType(tagSet, c.clone()) + myClone.setComponentByType(tagSet, component.clone()) - def setComponentByPosition(self, idx, value=None, verifyConstraints=True): - l = len(self._componentValues) - if idx >= l: - self._componentValues = self._componentValues + (idx-l+1)*[None] - if self._currentIdx is not None: - self._componentValues[self._currentIdx] = None - if value is None: - if self._componentValues[idx] is None: - self._componentValues[idx] = self._componentType.getTypeByPosition(idx).clone() - self._componentValuesSet = 1 - self._currentIdx = idx - return self - elif not isinstance(value, base.Asn1Item): - value = self._componentType.getTypeByPosition(idx).clone( - value=value - ) - if verifyConstraints: - if self._componentTypeLen: - self._verifyComponent(idx, value) - self._verifySubtypeSpec(value, idx) - self._componentValues[idx] = value + def getComponentByPosition(self, idx): + __doc__ = Set.__doc__ + + if self._currentIdx is None or 
self._currentIdx != idx: + return Set.getComponentByPosition(self, idx) + + return self._componentValues[idx] + + def setComponentByPosition(self, idx, value=noValue, + verifyConstraints=True, + matchTags=True, + matchConstraints=True): + """Assign |ASN.1| type component by position. + + Equivalent to Python sequence item assignment operation (e.g. `[]`). + + Parameters + ---------- + idx: :class:`int` + Component index (zero-based). Must either refer to existing + component or to N+1 component. In the latter case a new component + type gets instantiated (if *componentType* is set, or given ASN.1 + object is taken otherwise) and appended to the |ASN.1| sequence. + + value: :class:`object` or :py:class:`~pyasn1.type.base.PyAsn1Item` derivative + A Python value to initialize |ASN.1| component with (if *componentType* is set) + or ASN.1 value object to assign to |ASN.1| component. Once a new value is + set to *idx* component, previous value is dropped. + + verifyConstraints : :class:`bool` + If `False`, skip constraints validation + + matchTags: :class:`bool` + If `False`, skip component tags matching + + matchConstraints: :class:`bool` + If `False`, skip component constraints matching + + Returns + ------- + self + """ + oldIdx = self._currentIdx + Set.setComponentByPosition(self, idx, value, verifyConstraints, matchTags, matchConstraints) self._currentIdx = idx - self._componentValuesSet = 1 + if oldIdx is not None and oldIdx != idx: + self._componentValues[oldIdx] = None return self def getMinTagSet(self): if self._tagSet: return self._tagSet else: - return self._componentType.genMinTagSet() + return self._componentType.minTagSet - def getEffectiveTagSet(self): + @property + def effectiveTagSet(self): + """Return a :class:`~pyasn1.type.tag.TagSet` object of the currently initialized component or self (if |ASN.1| is tagged).""" if self._tagSet: return self._tagSet else: - c = self.getComponent() - if isinstance(c, Choice): - return c.getEffectiveTagSet() - else: - 
return c.getTagSet() - - def getTagMap(self): + component = self.getComponent() + return component.effectiveTagSet + + @property + def tagMap(self): + """"Return a :class:`~pyasn1.type.tagmap.TagMap` object mapping + ASN.1 tags to ASN.1 objects contained within callee. + """ if self._tagSet: - return Set.getTagMap(self) + return Set.tagMap.fget(self) else: - return Set.getComponentTagMap(self) + return Set.componentTagMap.fget(self) def getComponent(self, innerFlag=0): + """Return currently assigned component of the |ASN.1| object. + + Returns + ------- + : :py:class:`~pyasn1.type.base.PyAsn1Item` + a PyASN1 object + """ if self._currentIdx is None: raise error.PyAsn1Error('Component not chosen') else: @@ -1015,7 +2729,14 @@ def getComponent(self, innerFlag=0): else: return c - def getName(self, innerFlag=0): + def getName(self, innerFlag=False): + """Return the name of currently assigned component of the |ASN.1| object. + + Returns + ------- + : :py:class:`str` + |ASN.1| component name + """ if self._currentIdx is None: raise error.PyAsn1Error('Component not chosen') else: @@ -1025,18 +2746,61 @@ def getName(self, innerFlag=0): return c.getName(innerFlag) return self._componentType.getNameByPosition(self._currentIdx) - def setDefaultComponents(self): pass + @property + def isValue(self): + """Indicate if |ASN.1| component is set and represents ASN.1 type or ASN.1 value. + + The PyASN1 type objects can only participate in types comparison + and serve as a blueprint for serialization codecs to resolve + ambiguous types. + + The PyASN1 value objects can additionally participate in most + of built-in Python operations. + + Returns + ------- + : :class:`bool` + :class:`True` if |ASN.1| component is set and represent value and type, + :class:`False` if |ASN.1| component is not set or it represents just ASN.1 type. 
+ """ + if self._currentIdx is None: + return False + + return self._componentValues[self._currentIdx].isValue + class Any(OctetString): - tagSet = baseTagSet = tag.TagSet() # untagged - typeId = 6 - - def getTagMap(self): - return tagmap.TagMap( - { self.getTagSet(): self }, - { eoo.endOfOctets.getTagSet(): eoo.endOfOctets }, - self + __doc__ = OctetString.__doc__ + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s) + #: associated with |ASN.1| type. + tagSet = tag.TagSet() # untagged + + #: Set (class attribute) or return (class or instance attribute) a + #: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` object + #: imposing constraints on |ASN.1| type initialization values. + subtypeSpec = constraint.ConstraintsIntersection() + + # Disambiguation ASN.1 types identification + typeId = OctetString.getTypeId() + + @property + def tagMap(self): + """"Return a :class:`~pyasn1.type.tagmap.TagMap` object mapping + ASN.1 tags to ASN.1 objects contained within callee. + """ + try: + return self._tagMap + + except AttributeError: + self._tagMap = tagmap.TagMap( + {self.tagSet: self}, + {eoo.endOfOctets.tagSet: eoo.endOfOctets}, + self ) + return self._tagMap + # XXX # coercion rules? diff --git a/src/lib/pyasn1/type/useful.py b/src/lib/pyasn1/type/useful.py index a7139c22c..0b79a983f 100644 --- a/src/lib/pyasn1/type/useful.py +++ b/src/lib/pyasn1/type/useful.py @@ -1,12 +1,39 @@ -# ASN.1 "useful" types -from pyasn1.type import char, tag +# +# This file is part of pyasn1 software. 
+# +# Copyright (c) 2005-2017, Ilya Etingof +# License: http://pyasn1.sf.net/license.html +# +from pyasn1.type import univ, char, tag + +__all__ = ['ObjectDescriptor', 'GeneralizedTime', 'UTCTime'] + +NoValue = univ.NoValue +noValue = univ.noValue + + +class ObjectDescriptor(char.GraphicString): + __doc__ = char.GraphicString.__doc__ + + #: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects + tagSet = char.GraphicString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 7) + ) + class GeneralizedTime(char.VisibleString): + __doc__ = char.GraphicString.__doc__ + + #: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects tagSet = char.VisibleString.tagSet.tagImplicitly( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 24) - ) + ) + class UTCTime(char.VisibleString): + __doc__ = char.GraphicString.__doc__ + + #: Default :py:class:`~pyasn1.type.tag.TagSet` object for |ASN.1| objects tagSet = char.VisibleString.tagSet.tagImplicitly( tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 23) - ) + ) diff --git a/src/lib/pybitcointools/MANIFEST.in b/src/lib/pybitcointools/MANIFEST.in index 1aba38f67..70656b680 100644 --- a/src/lib/pybitcointools/MANIFEST.in +++ b/src/lib/pybitcointools/MANIFEST.in @@ -1 +1 @@ -include LICENSE +include bitcoin/english.txt diff --git a/src/lib/pybitcointools/README.txt b/src/lib/pybitcointools/README.md similarity index 100% rename from src/lib/pybitcointools/README.txt rename to src/lib/pybitcointools/README.md diff --git a/src/lib/pybitcointools/bitcoin/__init__.py b/src/lib/pybitcointools/bitcoin/__init__.py index 8b543fee5..7d529abc8 100644 --- a/src/lib/pybitcointools/bitcoin/__init__.py +++ b/src/lib/pybitcointools/bitcoin/__init__.py @@ -7,3 +7,4 @@ from .composite import * from .stealth import * from .blocks import * +from .mnemonic import * diff --git a/src/lib/pybitcointools/bitcoin/bci.py b/src/lib/pybitcointools/bitcoin/bci.py index 2ff11d931..79a2c401c 100644 --- 
a/src/lib/pybitcointools/bitcoin/bci.py +++ b/src/lib/pybitcointools/bitcoin/bci.py @@ -23,20 +23,76 @@ def make_request(*args): raise Exception(p) +def is_testnet(inp): + '''Checks if inp is a testnet address or if UTXO is a known testnet TxID''' + if isinstance(inp, (list, tuple)) and len(inp) >= 1: + return any([is_testnet(x) for x in inp]) + elif not isinstance(inp, basestring): # sanity check + raise TypeError("Input must be str/unicode, not type %s" % str(type(inp))) + + if not inp or (inp.lower() in ("btc", "testnet")): + pass + + ## ADDRESSES + if inp[0] in "123mn": + if re.match("^[2mn][a-km-zA-HJ-NP-Z0-9]{26,33}$", inp): + return True + elif re.match("^[13][a-km-zA-HJ-NP-Z0-9]{26,33}$", inp): + return False + else: + #sys.stderr.write("Bad address format %s") + return None + + ## TXID + elif re.match('^[0-9a-fA-F]{64}$', inp): + base_url = "http://api.blockcypher.com/v1/btc/{network}/txs/{txid}?includesHex=false" + try: + # try testnet fetchtx + make_request(base_url.format(network="test3", txid=inp.lower())) + return True + except: + # try mainnet fetchtx + make_request(base_url.format(network="main", txid=inp.lower())) + return False + sys.stderr.write("TxID %s has no match for testnet or mainnet (Bad TxID)") + return None + else: + raise TypeError("{0} is unknown input".format(inp)) + + +def set_network(*args): + '''Decides if args for unspent/fetchtx/pushtx are mainnet or testnet''' + r = [] + for arg in args: + if not arg: + pass + if isinstance(arg, basestring): + r.append(is_testnet(arg)) + elif isinstance(arg, (list, tuple)): + return set_network(*arg) + if any(r) and not all(r): + raise Exception("Mixed Testnet/Mainnet queries") + return "testnet" if any(r) else "btc" + + def parse_addr_args(*args): - # Valid input formats: blockr_unspent([addr1, addr2,addr3]) - # blockr_unspent(addr1, addr2, addr3) - # blockr_unspent([addr1, addr2, addr3], network) - # blockr_unspent(addr1, addr2, addr3, network) - # Where network is 'btc' or 'testnet' - network 
= 'btc' + # Valid input formats: unspent([addr1, addr2, addr3]) + # unspent([addr1, addr2, addr3], network) + # unspent(addr1, addr2, addr3) + # unspent(addr1, addr2, addr3, network) addr_args = args + network = "btc" + if len(args) == 0: + return [], 'btc' if len(args) >= 1 and args[-1] in ('testnet', 'btc'): network = args[-1] addr_args = args[:-1] if len(addr_args) == 1 and isinstance(addr_args, list): + network = set_network(*addr_args[0]) addr_args = addr_args[0] - + if addr_args and isinstance(addr_args, tuple) and isinstance(addr_args[0], list): + addr_args = addr_args[0] + network = set_network(addr_args) return network, addr_args @@ -46,14 +102,14 @@ def bci_unspent(*args): u = [] for a in addrs: try: - data = make_request('https://blockchain.info/unspent?address='+a) + data = make_request('https://blockchain.info/unspent?active='+a) except Exception as e: if str(e) == 'No free outputs to spend': continue else: raise Exception(e) try: - jsonobj = json.loads(data) + jsonobj = json.loads(data.decode("utf-8")) for o in jsonobj["unspent_outputs"]: h = o['tx_hash'].decode('hex')[::-1].encode('hex') u.append({ @@ -74,9 +130,9 @@ def blockr_unspent(*args): network, addr_args = parse_addr_args(*args) if network == 'testnet': - blockr_url = 'https://tbtc.blockr.io/api/v1/address/unspent/' + blockr_url = 'http://tbtc.blockr.io/api/v1/address/unspent/' elif network == 'btc': - blockr_url = 'https://btc.blockr.io/api/v1/address/unspent/' + blockr_url = 'http://btc.blockr.io/api/v1/address/unspent/' else: raise Exception( 'Unsupported network {0} for blockr_unspent'.format(network)) @@ -88,7 +144,7 @@ def blockr_unspent(*args): else: addrs = addr_args res = make_request(blockr_url+','.join(addrs)) - data = json.loads(res)['data'] + data = json.loads(res.decode("utf-8"))['data'] o = [] if 'unspent' in data: data = [data] @@ -102,7 +158,7 @@ def blockr_unspent(*args): def helloblock_unspent(*args): - network, addrs = parse_addr_args(*args) + addrs, network = 
parse_addr_args(*args) if network == 'testnet': url = 'https://testnet.helloblock.io/v1/addresses/%s/unspents?limit=500&offset=%s' elif network == 'btc': @@ -111,7 +167,7 @@ def helloblock_unspent(*args): for addr in addrs: for offset in xrange(0, 10**9, 500): res = make_request(url % (addr, offset)) - data = json.loads(res)["data"] + data = json.loads(res.decode("utf-8"))["data"] if not len(data["unspents"]): break elif offset: @@ -152,11 +208,21 @@ def history(*args): for addr in addrs: offset = 0 while 1: - data = make_request( - 'https://blockchain.info/address/%s?format=json&offset=%s' % - (addr, offset)) + gathered = False + while not gathered: + try: + data = make_request( + 'https://blockchain.info/address/%s?format=json&offset=%s' % + (addr, offset)) + gathered = True + except Exception as e: + try: + sys.stderr.write(e.read().strip()) + except: + sys.stderr.write(str(e)) + gathered = False try: - jsonobj = json.loads(data) + jsonobj = json.loads(data.decode("utf-8")) except: raise Exception("Failed to decode data: "+data) txs.extend(jsonobj["txs"]) @@ -167,7 +233,7 @@ def history(*args): outs = {} for tx in txs: for o in tx["out"]: - if o['addr'] in addrs: + if o.get('addr', None) in addrs: key = str(tx["tx_index"])+':'+str(o["n"]) outs[key] = { "address": o["addr"], @@ -177,11 +243,12 @@ def history(*args): } for tx in txs: for i, inp in enumerate(tx["inputs"]): - if inp["prev_out"]["addr"] in addrs: - key = str(inp["prev_out"]["tx_index"]) + \ - ':'+str(inp["prev_out"]["n"]) - if outs.get(key): - outs[key]["spend"] = tx["hash"]+':'+str(i) + if "prev_out" in inp: + if inp["prev_out"].get("addr", None) in addrs: + key = str(inp["prev_out"]["tx_index"]) + \ + ':'+str(inp["prev_out"]["n"]) + if outs.get(key): + outs[key]["spend"] = tx["hash"]+':'+str(i) return [outs[k] for k in outs] @@ -207,9 +274,9 @@ def eligius_pushtx(tx): def blockr_pushtx(tx, network='btc'): if network == 'testnet': - blockr_url = 'https://tbtc.blockr.io/api/v1/tx/push' + blockr_url = 
'http://tbtc.blockr.io/api/v1/tx/push' elif network == 'btc': - blockr_url = 'https://btc.blockr.io/api/v1/tx/push' + blockr_url = 'http://btc.blockr.io/api/v1/tx/push' else: raise Exception( 'Unsupported network {0} for blockr_pushtx'.format(network)) @@ -237,14 +304,21 @@ def pushtx(*args, **kwargs): return f(*args) -def last_block_height(): +def last_block_height(network='btc'): + if network == 'testnet': + data = make_request('http://tbtc.blockr.io/api/v1/block/info/last') + jsonobj = json.loads(data.decode("utf-8")) + return jsonobj["data"]["nb"] + data = make_request('https://blockchain.info/latestblock') - jsonobj = json.loads(data) + jsonobj = json.loads(data.decode("utf-8")) return jsonobj["height"] # Gets a specific transaction def bci_fetchtx(txhash): + if isinstance(txhash, list): + return [bci_fetchtx(h) for h in txhash] if not re.match('^[0-9a-fA-F]*$', txhash): txhash = txhash.encode('hex') data = make_request('https://blockchain.info/rawtx/'+txhash+'?format=hex') @@ -253,19 +327,27 @@ def bci_fetchtx(txhash): def blockr_fetchtx(txhash, network='btc'): if network == 'testnet': - blockr_url = 'https://tbtc.blockr.io/api/v1/tx/raw/' + blockr_url = 'http://tbtc.blockr.io/api/v1/tx/raw/' elif network == 'btc': - blockr_url = 'https://btc.blockr.io/api/v1/tx/raw/' + blockr_url = 'http://btc.blockr.io/api/v1/tx/raw/' else: raise Exception( 'Unsupported network {0} for blockr_fetchtx'.format(network)) - if not re.match('^[0-9a-fA-F]*$', txhash): - txhash = txhash.encode('hex') - jsondata = json.loads(make_request(blockr_url+txhash)) - return jsondata['data']['tx']['hex'] + if isinstance(txhash, list): + txhash = ','.join([x.encode('hex') if not re.match('^[0-9a-fA-F]*$', x) + else x for x in txhash]) + jsondata = json.loads(make_request(blockr_url+txhash).decode("utf-8")) + return [d['tx']['hex'] for d in jsondata['data']] + else: + if not re.match('^[0-9a-fA-F]*$', txhash): + txhash = txhash.encode('hex') + jsondata = 
json.loads(make_request(blockr_url+txhash).decode("utf-8")) + return jsondata['data']['tx']['hex'] def helloblock_fetchtx(txhash, network='btc'): + if isinstance(txhash, list): + return [helloblock_fetchtx(h) for h in txhash] if not re.match('^[0-9a-fA-F]*$', txhash): txhash = txhash.encode('hex') if network == 'testnet': @@ -275,7 +357,7 @@ def helloblock_fetchtx(txhash, network='btc'): else: raise Exception( 'Unsupported network {0} for helloblock_fetchtx'.format(network)) - data = json.loads(make_request(url + txhash))["data"]["transaction"] + data = json.loads(make_request(url + txhash).decode("utf-8"))["data"]["transaction"] o = { "locktime": data["locktime"], "version": data["version"], @@ -296,8 +378,8 @@ def helloblock_fetchtx(txhash, network='btc'): "value": outp["value"], "script": outp["scriptPubKey"] }) - from bitcoin.transaction import serialize - from bitcoin.transaction import txhash as TXHASH + from .transaction import serialize + from .transaction import txhash as TXHASH tx = serialize(o) assert TXHASH(tx) == txhash return tx @@ -325,7 +407,7 @@ def firstbits(address): def get_block_at_height(height): j = json.loads(make_request("https://blockchain.info/block-height/" + - str(height)+"?format=json")) + str(height)+"?format=json").decode("utf-8")) for b in j['blocks']: if b['main_chain'] is True: return b @@ -337,10 +419,10 @@ def _get_block(inp): return get_block_at_height(inp) else: return json.loads(make_request( - 'https://blockchain.info/rawblock/'+inp)) + 'https://blockchain.info/rawblock/'+inp).decode("utf-8")) -def get_block_header_data(inp): +def bci_get_block_header_data(inp): j = _get_block(inp) return { 'version': j['ver'], @@ -354,14 +436,14 @@ def get_block_header_data(inp): def blockr_get_block_header_data(height, network='btc'): if network == 'testnet': - blockr_url = "https://tbtc.blockr.io/api/v1/block/raw/" + blockr_url = "http://tbtc.blockr.io/api/v1/block/raw/" elif network == 'btc': - blockr_url = 
"https://btc.blockr.io/api/v1/block/raw/" + blockr_url = "http://btc.blockr.io/api/v1/block/raw/" else: raise Exception( 'Unsupported network {0} for blockr_get_block_header_data'.format(network)) - k = json.loads(make_request(blockr_url + str(height))) + k = json.loads(make_request(blockr_url + str(height)).decode("utf-8")) j = k['data'] return { 'version': j['version'], @@ -373,6 +455,40 @@ def blockr_get_block_header_data(height, network='btc'): 'nonce': j['nonce'], } + +def get_block_timestamp(height, network='btc'): + if network == 'testnet': + blockr_url = "http://tbtc.blockr.io/api/v1/block/info/" + elif network == 'btc': + blockr_url = "http://btc.blockr.io/api/v1/block/info/" + else: + raise Exception( + 'Unsupported network {0} for get_block_timestamp'.format(network)) + + import time, calendar + if isinstance(height, list): + k = json.loads(make_request(blockr_url + ','.join([str(x) for x in height])).decode("utf-8")) + o = {x['nb']: calendar.timegm(time.strptime(x['time_utc'], + "%Y-%m-%dT%H:%M:%SZ")) for x in k['data']} + return [o[x] for x in height] + else: + k = json.loads(make_request(blockr_url + str(height)).decode("utf-8")) + j = k['data']['time_utc'] + return calendar.timegm(time.strptime(j, "%Y-%m-%dT%H:%M:%SZ")) + + +block_header_data_getters = { + 'bci': bci_get_block_header_data, + 'blockr': blockr_get_block_header_data +} + + +def get_block_header_data(inp, **kwargs): + f = block_header_data_getters.get(kwargs.get('source', ''), + bci_get_block_header_data) + return f(inp, **kwargs) + + def get_txs_in_block(inp): j = _get_block(inp) hashes = [t['hash'] for t in j['tx']] @@ -380,5 +496,33 @@ def get_txs_in_block(inp): def get_block_height(txhash): - j = json.loads(make_request('https://blockchain.info/rawtx/'+txhash)) + j = json.loads(make_request('https://blockchain.info/rawtx/'+txhash).decode("utf-8")) return j['block_height'] + +# fromAddr, toAddr, 12345, changeAddress +def get_tx_composite(inputs, outputs, output_value, 
change_address=None, network=None): + """mktx using blockcypher API""" + inputs = [inputs] if not isinstance(inputs, list) else inputs + outputs = [outputs] if not isinstance(outputs, list) else outputs + network = set_network(change_address or inputs) if not network else network.lower() + url = "http://api.blockcypher.com/v1/btc/{network}/txs/new?includeToSignTx=true".format( + network=('test3' if network=='testnet' else 'main')) + is_address = lambda a: bool(re.match("^[123mn][a-km-zA-HJ-NP-Z0-9]{26,33}$", a)) + if any([is_address(x) for x in inputs]): + inputs_type = 'addresses' # also accepts UTXOs, only addresses supported presently + if any([is_address(x) for x in outputs]): + outputs_type = 'addresses' # TODO: add UTXO support + data = { + 'inputs': [{inputs_type: inputs}], + 'confirmations': 0, + 'preference': 'high', + 'outputs': [{outputs_type: outputs, "value": output_value}] + } + if change_address: + data["change_address"] = change_address # + jdata = json.loads(make_request(url, data)) + hash, txh = jdata.get("tosign")[0], jdata.get("tosign_tx")[0] + assert bin_dbl_sha256(txh.decode('hex')).encode('hex') == hash, "checksum mismatch %s" % hash + return txh.encode("utf-8") + +blockcypher_mktx = get_tx_composite diff --git a/src/lib/pybitcointools/bitcoin/composite.py b/src/lib/pybitcointools/bitcoin/composite.py index 0df0e0794..e5d50492a 100644 --- a/src/lib/pybitcointools/bitcoin/composite.py +++ b/src/lib/pybitcointools/bitcoin/composite.py @@ -6,12 +6,12 @@ # Takes privkey, address, value (satoshis), fee (satoshis) -def send(frm, to, value, fee=10000): - return sendmultitx(frm, to + ":" + str(value), fee) +def send(frm, to, value, fee=10000, **kwargs): + return sendmultitx(frm, to + ":" + str(value), fee, **kwargs) # Takes privkey, "address1:value1,address2:value2" (satoshis), fee (satoshis) -def sendmultitx(frm, tovalues, fee=10000, **kwargs): +def sendmultitx(frm, *args, **kwargs): tv, fee = args[:-1], int(args[-1]) outs = [] outvalue = 0 @@ -21,7 
+21,7 @@ def sendmultitx(frm, tovalues, fee=10000, **kwargs): u = unspent(privtoaddr(frm), **kwargs) u2 = select(u, int(outvalue)+int(fee)) - argz = u2 + outs + [frm, fee] + argz = u2 + outs + [privtoaddr(frm), fee] tx = mksend(*argz) tx2 = signall(tx, frm) return pushtx(tx2, **kwargs) diff --git a/src/lib/pybitcointools/bitcoin/english.txt b/src/lib/pybitcointools/bitcoin/english.txt new file mode 100644 index 000000000..942040ed5 --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/english.txt @@ -0,0 +1,2048 @@ +abandon +ability +able +about +above +absent +absorb +abstract +absurd +abuse +access +accident +account +accuse +achieve +acid +acoustic +acquire +across +act +action +actor +actress +actual +adapt +add +addict +address +adjust +admit +adult +advance +advice +aerobic +affair +afford +afraid +again +age +agent +agree +ahead +aim +air +airport +aisle +alarm +album +alcohol +alert +alien +all +alley +allow +almost +alone +alpha +already +also +alter +always +amateur +amazing +among +amount +amused +analyst +anchor +ancient +anger +angle +angry +animal +ankle +announce +annual +another +answer +antenna +antique +anxiety +any +apart +apology +appear +apple +approve +april +arch +arctic +area +arena +argue +arm +armed +armor +army +around +arrange +arrest +arrive +arrow +art +artefact +artist +artwork +ask +aspect +assault +asset +assist +assume +asthma +athlete +atom +attack +attend +attitude +attract +auction +audit +august +aunt +author +auto +autumn +average +avocado +avoid +awake +aware +away +awesome +awful +awkward +axis +baby +bachelor +bacon +badge +bag +balance +balcony +ball +bamboo +banana +banner +bar +barely +bargain +barrel +base +basic +basket +battle +beach +bean +beauty +because +become +beef +before +begin +behave +behind +believe +below +belt +bench +benefit +best +betray +better +between +beyond +bicycle +bid +bike +bind +biology +bird +birth +bitter +black +blade +blame +blanket +blast +bleak +bless +blind +blood +blossom +blouse +blue 
+blur +blush +board +boat +body +boil +bomb +bone +bonus +book +boost +border +boring +borrow +boss +bottom +bounce +box +boy +bracket +brain +brand +brass +brave +bread +breeze +brick +bridge +brief +bright +bring +brisk +broccoli +broken +bronze +broom +brother +brown +brush +bubble +buddy +budget +buffalo +build +bulb +bulk +bullet +bundle +bunker +burden +burger +burst +bus +business +busy +butter +buyer +buzz +cabbage +cabin +cable +cactus +cage +cake +call +calm +camera +camp +can +canal +cancel +candy +cannon +canoe +canvas +canyon +capable +capital +captain +car +carbon +card +cargo +carpet +carry +cart +case +cash +casino +castle +casual +cat +catalog +catch +category +cattle +caught +cause +caution +cave +ceiling +celery +cement +census +century +cereal +certain +chair +chalk +champion +change +chaos +chapter +charge +chase +chat +cheap +check +cheese +chef +cherry +chest +chicken +chief +child +chimney +choice +choose +chronic +chuckle +chunk +churn +cigar +cinnamon +circle +citizen +city +civil +claim +clap +clarify +claw +clay +clean +clerk +clever +click +client +cliff +climb +clinic +clip +clock +clog +close +cloth +cloud +clown +club +clump +cluster +clutch +coach +coast +coconut +code +coffee +coil +coin +collect +color +column +combine +come +comfort +comic +common +company +concert +conduct +confirm +congress +connect +consider +control +convince +cook +cool +copper +copy +coral +core +corn +correct +cost +cotton +couch +country +couple +course +cousin +cover +coyote +crack +cradle +craft +cram +crane +crash +crater +crawl +crazy +cream +credit +creek +crew +cricket +crime +crisp +critic +crop +cross +crouch +crowd +crucial +cruel +cruise +crumble +crunch +crush +cry +crystal +cube +culture +cup +cupboard +curious +current +curtain +curve +cushion +custom +cute +cycle +dad +damage +damp +dance +danger +daring +dash +daughter +dawn +day +deal +debate +debris +decade +december +decide +decline +decorate +decrease +deer +defense +define +defy 
+degree +delay +deliver +demand +demise +denial +dentist +deny +depart +depend +deposit +depth +deputy +derive +describe +desert +design +desk +despair +destroy +detail +detect +develop +device +devote +diagram +dial +diamond +diary +dice +diesel +diet +differ +digital +dignity +dilemma +dinner +dinosaur +direct +dirt +disagree +discover +disease +dish +dismiss +disorder +display +distance +divert +divide +divorce +dizzy +doctor +document +dog +doll +dolphin +domain +donate +donkey +donor +door +dose +double +dove +draft +dragon +drama +drastic +draw +dream +dress +drift +drill +drink +drip +drive +drop +drum +dry +duck +dumb +dune +during +dust +dutch +duty +dwarf +dynamic +eager +eagle +early +earn +earth +easily +east +easy +echo +ecology +economy +edge +edit +educate +effort +egg +eight +either +elbow +elder +electric +elegant +element +elephant +elevator +elite +else +embark +embody +embrace +emerge +emotion +employ +empower +empty +enable +enact +end +endless +endorse +enemy +energy +enforce +engage +engine +enhance +enjoy +enlist +enough +enrich +enroll +ensure +enter +entire +entry +envelope +episode +equal +equip +era +erase +erode +erosion +error +erupt +escape +essay +essence +estate +eternal +ethics +evidence +evil +evoke +evolve +exact +example +excess +exchange +excite +exclude +excuse +execute +exercise +exhaust +exhibit +exile +exist +exit +exotic +expand +expect +expire +explain +expose +express +extend +extra +eye +eyebrow +fabric +face +faculty +fade +faint +faith +fall +false +fame +family +famous +fan +fancy +fantasy +farm +fashion +fat +fatal +father +fatigue +fault +favorite +feature +february +federal +fee +feed +feel +female +fence +festival +fetch +fever +few +fiber +fiction +field +figure +file +film +filter +final +find +fine +finger +finish +fire +firm +first +fiscal +fish +fit +fitness +fix +flag +flame +flash +flat +flavor +flee +flight +flip +float +flock +floor +flower +fluid +flush +fly +foam +focus +fog +foil +fold +follow +food 
+foot +force +forest +forget +fork +fortune +forum +forward +fossil +foster +found +fox +fragile +frame +frequent +fresh +friend +fringe +frog +front +frost +frown +frozen +fruit +fuel +fun +funny +furnace +fury +future +gadget +gain +galaxy +gallery +game +gap +garage +garbage +garden +garlic +garment +gas +gasp +gate +gather +gauge +gaze +general +genius +genre +gentle +genuine +gesture +ghost +giant +gift +giggle +ginger +giraffe +girl +give +glad +glance +glare +glass +glide +glimpse +globe +gloom +glory +glove +glow +glue +goat +goddess +gold +good +goose +gorilla +gospel +gossip +govern +gown +grab +grace +grain +grant +grape +grass +gravity +great +green +grid +grief +grit +grocery +group +grow +grunt +guard +guess +guide +guilt +guitar +gun +gym +habit +hair +half +hammer +hamster +hand +happy +harbor +hard +harsh +harvest +hat +have +hawk +hazard +head +health +heart +heavy +hedgehog +height +hello +helmet +help +hen +hero +hidden +high +hill +hint +hip +hire +history +hobby +hockey +hold +hole +holiday +hollow +home +honey +hood +hope +horn +horror +horse +hospital +host +hotel +hour +hover +hub +huge +human +humble +humor +hundred +hungry +hunt +hurdle +hurry +hurt +husband +hybrid +ice +icon +idea +identify +idle +ignore +ill +illegal +illness +image +imitate +immense +immune +impact +impose +improve +impulse +inch +include +income +increase +index +indicate +indoor +industry +infant +inflict +inform +inhale +inherit +initial +inject +injury +inmate +inner +innocent +input +inquiry +insane +insect +inside +inspire +install +intact +interest +into +invest +invite +involve +iron +island +isolate +issue +item +ivory +jacket +jaguar +jar +jazz +jealous +jeans +jelly +jewel +job +join +joke +journey +joy +judge +juice +jump +jungle +junior +junk +just +kangaroo +keen +keep +ketchup +key +kick +kid +kidney +kind +kingdom +kiss +kit +kitchen +kite +kitten +kiwi +knee +knife +knock +know +lab +label +labor +ladder +lady +lake +lamp +language +laptop +large 
+later +latin +laugh +laundry +lava +law +lawn +lawsuit +layer +lazy +leader +leaf +learn +leave +lecture +left +leg +legal +legend +leisure +lemon +lend +length +lens +leopard +lesson +letter +level +liar +liberty +library +license +life +lift +light +like +limb +limit +link +lion +liquid +list +little +live +lizard +load +loan +lobster +local +lock +logic +lonely +long +loop +lottery +loud +lounge +love +loyal +lucky +luggage +lumber +lunar +lunch +luxury +lyrics +machine +mad +magic +magnet +maid +mail +main +major +make +mammal +man +manage +mandate +mango +mansion +manual +maple +marble +march +margin +marine +market +marriage +mask +mass +master +match +material +math +matrix +matter +maximum +maze +meadow +mean +measure +meat +mechanic +medal +media +melody +melt +member +memory +mention +menu +mercy +merge +merit +merry +mesh +message +metal +method +middle +midnight +milk +million +mimic +mind +minimum +minor +minute +miracle +mirror +misery +miss +mistake +mix +mixed +mixture +mobile +model +modify +mom +moment +monitor +monkey +monster +month +moon +moral +more +morning +mosquito +mother +motion +motor +mountain +mouse +move +movie +much +muffin +mule +multiply +muscle +museum +mushroom +music +must +mutual +myself +mystery +myth +naive +name +napkin +narrow +nasty +nation +nature +near +neck +need +negative +neglect +neither +nephew +nerve +nest +net +network +neutral +never +news +next +nice +night +noble +noise +nominee +noodle +normal +north +nose +notable +note +nothing +notice +novel +now +nuclear +number +nurse +nut +oak +obey +object +oblige +obscure +observe +obtain +obvious +occur +ocean +october +odor +off +offer +office +often +oil +okay +old +olive +olympic +omit +once +one +onion +online +only +open +opera +opinion +oppose +option +orange +orbit +orchard +order +ordinary +organ +orient +original +orphan +ostrich +other +outdoor +outer +output +outside +oval +oven +over +own +owner +oxygen +oyster +ozone +pact +paddle +page +pair +palace 
+palm +panda +panel +panic +panther +paper +parade +parent +park +parrot +party +pass +patch +path +patient +patrol +pattern +pause +pave +payment +peace +peanut +pear +peasant +pelican +pen +penalty +pencil +people +pepper +perfect +permit +person +pet +phone +photo +phrase +physical +piano +picnic +picture +piece +pig +pigeon +pill +pilot +pink +pioneer +pipe +pistol +pitch +pizza +place +planet +plastic +plate +play +please +pledge +pluck +plug +plunge +poem +poet +point +polar +pole +police +pond +pony +pool +popular +portion +position +possible +post +potato +pottery +poverty +powder +power +practice +praise +predict +prefer +prepare +present +pretty +prevent +price +pride +primary +print +priority +prison +private +prize +problem +process +produce +profit +program +project +promote +proof +property +prosper +protect +proud +provide +public +pudding +pull +pulp +pulse +pumpkin +punch +pupil +puppy +purchase +purity +purpose +purse +push +put +puzzle +pyramid +quality +quantum +quarter +question +quick +quit +quiz +quote +rabbit +raccoon +race +rack +radar +radio +rail +rain +raise +rally +ramp +ranch +random +range +rapid +rare +rate +rather +raven +raw +razor +ready +real +reason +rebel +rebuild +recall +receive +recipe +record +recycle +reduce +reflect +reform +refuse +region +regret +regular +reject +relax +release +relief +rely +remain +remember +remind +remove +render +renew +rent +reopen +repair +repeat +replace +report +require +rescue +resemble +resist +resource +response +result +retire +retreat +return +reunion +reveal +review +reward +rhythm +rib +ribbon +rice +rich +ride +ridge +rifle +right +rigid +ring +riot +ripple +risk +ritual +rival +river +road +roast +robot +robust +rocket +romance +roof +rookie +room +rose +rotate +rough +round +route +royal +rubber +rude +rug +rule +run +runway +rural +sad +saddle +sadness +safe +sail +salad +salmon +salon +salt +salute +same +sample +sand +satisfy +satoshi +sauce +sausage +save +say +scale +scan +scare 
+scatter +scene +scheme +school +science +scissors +scorpion +scout +scrap +screen +script +scrub +sea +search +season +seat +second +secret +section +security +seed +seek +segment +select +sell +seminar +senior +sense +sentence +series +service +session +settle +setup +seven +shadow +shaft +shallow +share +shed +shell +sheriff +shield +shift +shine +ship +shiver +shock +shoe +shoot +shop +short +shoulder +shove +shrimp +shrug +shuffle +shy +sibling +sick +side +siege +sight +sign +silent +silk +silly +silver +similar +simple +since +sing +siren +sister +situate +six +size +skate +sketch +ski +skill +skin +skirt +skull +slab +slam +sleep +slender +slice +slide +slight +slim +slogan +slot +slow +slush +small +smart +smile +smoke +smooth +snack +snake +snap +sniff +snow +soap +soccer +social +sock +soda +soft +solar +soldier +solid +solution +solve +someone +song +soon +sorry +sort +soul +sound +soup +source +south +space +spare +spatial +spawn +speak +special +speed +spell +spend +sphere +spice +spider +spike +spin +spirit +split +spoil +sponsor +spoon +sport +spot +spray +spread +spring +spy +square +squeeze +squirrel +stable +stadium +staff +stage +stairs +stamp +stand +start +state +stay +steak +steel +stem +step +stereo +stick +still +sting +stock +stomach +stone +stool +story +stove +strategy +street +strike +strong +struggle +student +stuff +stumble +style +subject +submit +subway +success +such +sudden +suffer +sugar +suggest +suit +summer +sun +sunny +sunset +super +supply +supreme +sure +surface +surge +surprise +surround +survey +suspect +sustain +swallow +swamp +swap +swarm +swear +sweet +swift +swim +swing +switch +sword +symbol +symptom +syrup +system +table +tackle +tag +tail +talent +talk +tank +tape +target +task +taste +tattoo +taxi +teach +team +tell +ten +tenant +tennis +tent +term +test +text +thank +that +theme +then +theory +there +they +thing +this +thought +three +thrive +throw +thumb +thunder +ticket +tide +tiger +tilt +timber +time +tiny 
+tip +tired +tissue +title +toast +tobacco +today +toddler +toe +together +toilet +token +tomato +tomorrow +tone +tongue +tonight +tool +tooth +top +topic +topple +torch +tornado +tortoise +toss +total +tourist +toward +tower +town +toy +track +trade +traffic +tragic +train +transfer +trap +trash +travel +tray +treat +tree +trend +trial +tribe +trick +trigger +trim +trip +trophy +trouble +truck +true +truly +trumpet +trust +truth +try +tube +tuition +tumble +tuna +tunnel +turkey +turn +turtle +twelve +twenty +twice +twin +twist +two +type +typical +ugly +umbrella +unable +unaware +uncle +uncover +under +undo +unfair +unfold +unhappy +uniform +unique +unit +universe +unknown +unlock +until +unusual +unveil +update +upgrade +uphold +upon +upper +upset +urban +urge +usage +use +used +useful +useless +usual +utility +vacant +vacuum +vague +valid +valley +valve +van +vanish +vapor +various +vast +vault +vehicle +velvet +vendor +venture +venue +verb +verify +version +very +vessel +veteran +viable +vibrant +vicious +victory +video +view +village +vintage +violin +virtual +virus +visa +visit +visual +vital +vivid +vocal +voice +void +volcano +volume +vote +voyage +wage +wagon +wait +walk +wall +walnut +want +warfare +warm +warrior +wash +wasp +waste +water +wave +way +wealth +weapon +wear +weasel +weather +web +wedding +weekend +weird +welcome +west +wet +whale +what +wheat +wheel +when +where +whip +whisper +wide +width +wife +wild +will +win +window +wine +wing +wink +winner +winter +wire +wisdom +wise +wish +witness +wolf +woman +wonder +wood +wool +word +work +world +worry +worth +wrap +wreck +wrestle +wrist +write +wrong +yard +year +yellow +you +young +youth +zebra +zero +zone +zoo diff --git a/src/lib/pybitcointools/bitcoin/main.py b/src/lib/pybitcointools/bitcoin/main.py index 2b8bdd04a..8cf3a9f7d 100644 --- a/src/lib/pybitcointools/bitcoin/main.py +++ b/src/lib/pybitcointools/bitcoin/main.py @@ -122,7 +122,7 @@ def jacobian_add(p, q): U1H2 = (U1 * H2) % P nx = (R 
** 2 - H3 - 2 * U1H2) % P ny = (R * (U1H2 - nx) - S1 * H3) % P - nz = H * p[2] * q[2] + nz = (H * p[2] * q[2]) % P return (nx, ny, nz) @@ -179,10 +179,10 @@ def encode_pubkey(pub, formt): pub = decode_pubkey(pub) if formt == 'decimal': return pub elif formt == 'bin': return b'\x04' + encode(pub[0], 256, 32) + encode(pub[1], 256, 32) - elif formt == 'bin_compressed': + elif formt == 'bin_compressed': return from_int_to_byte(2+(pub[1] % 2)) + encode(pub[0], 256, 32) elif formt == 'hex': return '04' + encode(pub[0], 16, 64) + encode(pub[1], 16, 64) - elif formt == 'hex_compressed': + elif formt == 'hex_compressed': return '0'+str(2+(pub[1] % 2)) + encode(pub[0], 16, 64) elif formt == 'bin_electrum': return encode(pub[0], 256, 32) + encode(pub[1], 256, 32) elif formt == 'hex_electrum': return encode(pub[0], 16, 64) + encode(pub[1], 16, 64) @@ -253,6 +253,9 @@ def add_privkeys(p1, p2): f1, f2 = get_privkey_format(p1), get_privkey_format(p2) return encode_privkey((decode_privkey(p1, f1) + decode_privkey(p2, f2)) % N, f1) +def mul_privkeys(p1, p2): + f1, f2 = get_privkey_format(p1), get_privkey_format(p2) + return encode_privkey((decode_privkey(p1, f1) * decode_privkey(p2, f2)) % N, f1) def multiply(pubkey, privkey): f1, f2 = get_pubkey_format(pubkey), get_privkey_format(privkey) @@ -450,12 +453,32 @@ def pubkey_to_address(pubkey, magicbyte=0): pubtoaddr = pubkey_to_address + +def is_privkey(priv): + try: + get_privkey_format(priv) + return True + except: + return False + +def is_pubkey(pubkey): + try: + get_pubkey_format(pubkey) + return True + except: + return False + +def is_address(addr): + ADDR_RE = re.compile("^[123mn][a-km-zA-HJ-NP-Z0-9]{26,33}$") + return bool(ADDR_RE.match(addr)) + + # EDCSA def encode_sig(v, r, s): vb, rb, sb = from_int_to_byte(v), encode(r, 256), encode(s, 256) - + result = base64.b64encode(vb+b'\x00'*(32-len(rb))+rb+b'\x00'*(32-len(sb))+sb) return result if is_python2 else str(result, 'utf-8') @@ -487,35 +510,59 @@ def ecdsa_raw_sign(msghash, 
priv): r, y = fast_multiply(G, k) s = inv(k, N) * (z + r*decode_privkey(priv)) % N - return 27+(y % 2), r, s + v, r, s = 27+((y % 2) ^ (0 if s * 2 < N else 1)), r, s if s * 2 < N else N - s + if 'compressed' in get_privkey_format(priv): + v += 4 + return v, r, s def ecdsa_sign(msg, priv): - return encode_sig(*ecdsa_raw_sign(electrum_sig_hash(msg), priv)) + v, r, s = ecdsa_raw_sign(electrum_sig_hash(msg), priv) + sig = encode_sig(v, r, s) + assert ecdsa_verify(msg, sig, + privtopub(priv)), "Bad Sig!\t %s\nv = %d\n,r = %d\ns = %d" % (sig, v, r, s) + return sig def ecdsa_raw_verify(msghash, vrs, pub): v, r, s = vrs + if not (27 <= v <= 34): + return False w = inv(s, N) z = hash_to_int(msghash) u1, u2 = z*w % N, r*w % N x, y = fast_add(fast_multiply(G, u1), fast_multiply(decode_pubkey(pub), u2)) + return bool(r == x and (r % N) and (s % N)) + - return r == x +# For BitcoinCore, (msg = addr or msg = "") be default +def ecdsa_verify_addr(msg, sig, addr): + assert is_address(addr) + Q = ecdsa_recover(msg, sig) + magic = get_version_byte(addr) + return (addr == pubtoaddr(Q, int(magic))) or (addr == pubtoaddr(compress(Q), int(magic))) def ecdsa_verify(msg, sig, pub): + if is_address(pub): + return ecdsa_verify_addr(msg, sig, pub) return ecdsa_raw_verify(electrum_sig_hash(msg), decode_sig(sig), pub) def ecdsa_raw_recover(msghash, vrs): v, r, s = vrs - + if not (27 <= v <= 34): + raise ValueError("%d must in range 27-31" % v) x = r - beta = pow(x*x*x+A*x+B, (P+1)//4, P) + xcubedaxb = (x*x*x+A*x+B) % P + beta = pow(xcubedaxb, (P+1)//4, P) y = beta if v % 2 ^ beta % 2 else (P - beta) + # If xcubedaxb is not a quadratic residue, then r cannot be the x coord + # for a point on the curve, and so the sig is invalid + if (xcubedaxb - y*y) % P != 0 or not (r % N) or not (s % N): + return False z = hash_to_int(msghash) Gz = jacobian_multiply((Gx, Gy, 1), (N - z) % N) XY = jacobian_multiply((x, y, 1), s) @@ -523,10 +570,12 @@ def ecdsa_raw_recover(msghash, vrs): Q = 
jacobian_multiply(Qr, inv(r, N)) Q = from_jacobian(Q) - if ecdsa_raw_verify(msghash, vrs, Q): - return Q - return False + # if ecdsa_raw_verify(msghash, vrs, Q): + return Q + # return False def ecdsa_recover(msg, sig): - return encode_pubkey(ecdsa_raw_recover(electrum_sig_hash(msg), decode_sig(sig)), 'hex') + v,r,s = decode_sig(sig) + Q = ecdsa_raw_recover(electrum_sig_hash(msg), (v,r,s)) + return encode_pubkey(Q, 'hex_compressed') if v >= 31 else encode_pubkey(Q, 'hex') diff --git a/src/lib/pybitcointools/bitcoin/mnemonic.py b/src/lib/pybitcointools/bitcoin/mnemonic.py new file mode 100644 index 000000000..a9df36177 --- /dev/null +++ b/src/lib/pybitcointools/bitcoin/mnemonic.py @@ -0,0 +1,127 @@ +import hashlib +import os.path +import binascii +import random +from bisect import bisect_left + +wordlist_english=list(open(os.path.join(os.path.dirname(os.path.realpath(__file__)),'english.txt'),'r')) + +def eint_to_bytes(entint,entbits): + a=hex(entint)[2:].rstrip('L').zfill(32) + print(a) + return binascii.unhexlify(a) + +def mnemonic_int_to_words(mint,mint_num_words,wordlist=wordlist_english): + backwords=[wordlist[(mint >> (11*x)) & 0x7FF].strip() for x in range(mint_num_words)] + return backwords[::-1] + +def entropy_cs(entbytes): + entropy_size=8*len(entbytes) + checksum_size=entropy_size//32 + hd=hashlib.sha256(entbytes).hexdigest() + csint=int(hd,16) >> (256-checksum_size) + return csint,checksum_size + +def entropy_to_words(entbytes,wordlist=wordlist_english): + if(len(entbytes) < 4 or len(entbytes) % 4 != 0): + raise ValueError("The size of the entropy must be a multiple of 4 bytes (multiple of 32 bits)") + entropy_size=8*len(entbytes) + csint,checksum_size = entropy_cs(entbytes) + entint=int(binascii.hexlify(entbytes),16) + mint=(entint << checksum_size) | csint + mint_num_words=(entropy_size+checksum_size)//11 + + return mnemonic_int_to_words(mint,mint_num_words,wordlist) + +def words_bisect(word,wordlist=wordlist_english): + lo=bisect_left(wordlist,word) + 
hi=len(wordlist)-bisect_left(wordlist[:lo:-1],word) + + return lo,hi + +def words_split(wordstr,wordlist=wordlist_english): + def popword(wordstr,wordlist): + for fwl in range(1,9): + w=wordstr[:fwl].strip() + lo,hi=words_bisect(w,wordlist) + if(hi-lo == 1): + return w,wordstr[fwl:].lstrip() + wordlist=wordlist[lo:hi] + raise Exception("Wordstr %s not found in list" %(w)) + + words=[] + tail=wordstr + while(len(tail)): + head,tail=popword(tail,wordlist) + words.append(head) + return words + +def words_to_mnemonic_int(words,wordlist=wordlist_english): + if(isinstance(words,str)): + words=words_split(words,wordlist) + return sum([wordlist.index(w) << (11*x) for x,w in enumerate(words[::-1])]) + +def words_verify(words,wordlist=wordlist_english): + if(isinstance(words,str)): + words=words_split(words,wordlist) + + mint = words_to_mnemonic_int(words,wordlist) + mint_bits=len(words)*11 + cs_bits=mint_bits//32 + entropy_bits=mint_bits-cs_bits + eint=mint >> cs_bits + csint=mint & ((1 << cs_bits)-1) + ebytes=_eint_to_bytes(eint,entropy_bits) + return csint == entropy_cs(ebytes) + +def mnemonic_to_seed(mnemonic_phrase,passphrase=b''): + try: + from hashlib import pbkdf2_hmac + def pbkdf2_hmac_sha256(password,salt,iters=2048): + return pbkdf2_hmac(hash_name='sha512',password=password,salt=salt,iterations=iters) + except: + try: + from Crypto.Protocol.KDF import PBKDF2 + from Crypto.Hash import SHA512,HMAC + + def pbkdf2_hmac_sha256(password,salt,iters=2048): + return PBKDF2(password=password,salt=salt,dkLen=64,count=iters,prf=lambda p,s: HMAC.new(p,s,SHA512).digest()) + except: + try: + + from pbkdf2 import PBKDF2 + import hmac + def pbkdf2_hmac_sha256(password,salt,iters=2048): + return PBKDF2(password,salt, iterations=iters, macmodule=hmac, digestmodule=hashlib.sha512).read(64) + except: + raise RuntimeError("No implementation of pbkdf2 was found!") + + return pbkdf2_hmac_sha256(password=mnemonic_phrase,salt=b'mnemonic'+passphrase) + +def 
words_mine(prefix,entbits,satisfunction,wordlist=wordlist_english,randombits=random.getrandbits): + prefix_bits=len(prefix)*11 + mine_bits=entbits-prefix_bits + pint=words_to_mnemonic_int(prefix,wordlist) + pint<<=mine_bits + dint=randombits(mine_bits) + count=0 + while(not satisfunction(entropy_to_words(eint_to_bytes(pint+dint,entbits)))): + dint=randombits(mine_bits) + if((count & 0xFFFF) == 0): + print("Searched %f percent of the space" % (float(count)/float(1 << mine_bits))) + + return entropy_to_words(eint_to_bytes(pint+dint,entbits)) + +if __name__=="__main__": + import json + testvectors=json.load(open('vectors.json','r')) + passed=True + for v in testvectors['english']: + ebytes=binascii.unhexlify(v[0]) + w=' '.join(entropy_to_words(ebytes)) + seed=mnemonic_to_seed(w,passphrase='TREZOR') + passed = passed and w==v[1] + passed = passed and binascii.hexlify(seed)==v[2] + print("Tests %s." % ("Passed" if passed else "Failed")) + + diff --git a/src/lib/pybitcointools/bitcoin/py2specials.py b/src/lib/pybitcointools/bitcoin/py2specials.py index 4e2e42bb4..337154f3b 100644 --- a/src/lib/pybitcointools/bitcoin/py2specials.py +++ b/src/lib/pybitcointools/bitcoin/py2specials.py @@ -40,10 +40,14 @@ def changebase(string, frm, to, minlen=0): return encode(decode(string, frm), to, minlen) def bin_to_b58check(inp, magicbyte=0): - inp_fmtd = chr(int(magicbyte)) + inp - leadingzbytes = len(re.match('^\x00*', inp_fmtd).group(0)) - checksum = bin_dbl_sha256(inp_fmtd)[:4] - return '1' * leadingzbytes + changebase(inp_fmtd+checksum, 256, 58) + if magicbyte == 0: + inp = '\x00' + inp + while magicbyte > 0: + inp = chr(int(magicbyte % 256)) + inp + magicbyte //= 256 + leadingzbytes = len(re.match('^\x00*', inp).group(0)) + checksum = bin_dbl_sha256(inp)[:4] + return '1' * leadingzbytes + changebase(inp+checksum, 256, 58) def bytes_to_hex_string(b): return b.encode('hex') diff --git a/src/lib/pybitcointools/bitcoin/py3specials.py b/src/lib/pybitcointools/bitcoin/py3specials.py 
index be2347222..7593b9a68 100644 --- a/src/lib/pybitcointools/bitcoin/py3specials.py +++ b/src/lib/pybitcointools/bitcoin/py3specials.py @@ -38,16 +38,20 @@ def changebase(string, frm, to, minlen=0): return encode(decode(string, frm), to, minlen) def bin_to_b58check(inp, magicbyte=0): - inp_fmtd = from_int_to_byte(int(magicbyte))+inp + if magicbyte == 0: + inp = from_int_to_byte(0) + inp + while magicbyte > 0: + inp = from_int_to_byte(magicbyte % 256) + inp + magicbyte //= 256 leadingzbytes = 0 - for x in inp_fmtd: + for x in inp: if x != 0: break leadingzbytes += 1 - checksum = bin_dbl_sha256(inp_fmtd)[:4] - return '1' * leadingzbytes + changebase(inp_fmtd+checksum, 256, 58) + checksum = bin_dbl_sha256(inp)[:4] + return '1' * leadingzbytes + changebase(inp+checksum, 256, 58) def bytes_to_hex_string(b): if isinstance(b, str): diff --git a/src/lib/pybitcointools/bitcoin/transaction.py b/src/lib/pybitcointools/bitcoin/transaction.py index ec71ec9b7..4a501504f 100644 --- a/src/lib/pybitcointools/bitcoin/transaction.py +++ b/src/lib/pybitcointools/bitcoin/transaction.py @@ -9,7 +9,7 @@ def json_is_base(obj, base): if not is_python2 and isinstance(obj, bytes): return False - + alpha = get_code_string(base) if isinstance(obj, string_types): for i in range(len(obj)): @@ -58,7 +58,7 @@ def read_as_int(bytez): def read_var_int(): pos[0] += 1 - + val = from_byte_to_int(tx[pos[0]-1]) if val < 253: return val @@ -138,9 +138,9 @@ def signature_form(tx, i, script, hashcode=SIGHASH_ALL): newtx["outs"] = [] elif hashcode == SIGHASH_SINGLE: newtx["outs"] = newtx["outs"][:len(newtx["ins"])] - for out in range(len(newtx["ins"]) - 1): - out.value = 2**64 - 1 - out.script = "" + for out in newtx["outs"][:len(newtx["ins"]) - 1]: + out['value'] = 2**64 - 1 + out['script'] = "" elif hashcode == SIGHASH_ANYONECANPAY: newtx["ins"] = [newtx["ins"][i]] else: @@ -152,15 +152,14 @@ def signature_form(tx, i, script, hashcode=SIGHASH_ALL): def der_encode_sig(v, r, s): b1, b2 = 
safe_hexlify(encode(r, 256)), safe_hexlify(encode(s, 256)) - if r >= 2**255: + if len(b1) and b1[0] in '89abcdef': b1 = '00' + b1 - if s >= 2**255: + if len(b2) and b2[0] in '89abcdef': b2 = '00' + b2 left = '02'+encode(len(b1)//2, 16, 2)+b1 right = '02'+encode(len(b2)//2, 16, 2)+b2 return '30'+encode(len(left+right)//2, 16, 2)+left+right - def der_decode_sig(sig): leftlen = decode(sig[6:8], 16)*2 left = sig[8:8+leftlen] @@ -168,6 +167,32 @@ def der_decode_sig(sig): right = sig[12+leftlen:12+leftlen+rightlen] return (None, decode(left, 16), decode(right, 16)) +def is_bip66(sig): + """Checks hex DER sig for BIP66 consistency""" + #https://raw.githubusercontent.com/bitcoin/bips/master/bip-0066.mediawiki + #0x30 [total-len] 0x02 [R-len] [R] 0x02 [S-len] [S] [sighash] + sig = bytearray.fromhex(sig) if re.match('^[0-9a-fA-F]*$', sig) else bytearray(sig) + if (sig[0] == 0x30) and (sig[1] == len(sig)-2): # check if sighash is missing + sig.extend(b"\1") # add SIGHASH_ALL for testing + #assert (sig[-1] & 124 == 0) and (not not sig[-1]), "Bad SIGHASH value" + + if len(sig) < 9 or len(sig) > 73: return False + if (sig[0] != 0x30): return False + if (sig[1] != len(sig)-3): return False + rlen = sig[3] + if (5+rlen >= len(sig)): return False + slen = sig[5+rlen] + if (rlen + slen + 7 != len(sig)): return False + if (sig[2] != 0x02): return False + if (rlen == 0): return False + if (sig[4] & 0x80): return False + if (rlen > 1 and (sig[4] == 0x00) and not (sig[5] & 0x80)): return False + if (sig[4+rlen] != 0x02): return False + if (slen == 0): return False + if (sig[rlen+6] & 0x80): return False + if (slen > 1 and (sig[6+rlen] == 0x00) and not (sig[7+rlen] & 0x80)): + return False + return True def txhash(tx, hashcode=None): if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx): @@ -230,8 +255,11 @@ def script_to_address(script, vbyte=0): if vbyte in [111, 196]: # Testnet scripthash_byte = 196 - else: + elif vbyte == 0: + # Mainnet scripthash_byte = 5 + else: + 
scripthash_byte = vbyte # BIP0016 scripthash addresses return bin_to_b58check(script[2:-1], scripthash_byte) @@ -275,7 +303,7 @@ def serialize_script_unit(unit): if unit < 16: return from_int_to_byte(unit + 80) else: - return bytes([unit]) + return from_int_to_byte(unit) elif unit is None: return b'\x00' else: @@ -300,7 +328,7 @@ def serialize_script(script): if json_is_base(script, 16): return safe_hexlify(serialize_script(json_changebase(script, lambda x: binascii.unhexlify(x)))) - + result = bytes() for b in map(serialize_script_unit, script): result += b if isinstance(b, bytes) else bytes(b, 'utf-8') @@ -313,7 +341,7 @@ def mk_multisig_script(*args): # [pubs],k or pub1,pub2...pub[n],k else: pubs = list(filter(lambda x: len(str(x)) >= 32, args)) k = int(args[len(pubs)]) - return serialize_script([k]+pubs+[len(pubs)]) + 'ae' + return serialize_script([k]+pubs+[len(pubs)]+[0xae]) # Signing and verifying @@ -378,8 +406,12 @@ def apply_multisignatures(*args): if isinstance(tx, str) and re.match('^[0-9a-fA-F]*$', tx): return safe_hexlify(apply_multisignatures(binascii.unhexlify(tx), i, script, sigs)) + # Not pushing empty elements on the top of the stack if passing no + # script (in case of bare multisig inputs there is no script) + script_blob = [] if script.__len__() == 0 else [script] + txobj = deserialize(tx) - txobj["ins"][i]["script"] = serialize_script([None]+sigs+[script]) + txobj["ins"][i]["script"] = serialize_script([None]+sigs+script_blob) return serialize(txobj) diff --git a/src/lib/pybitcointools/setup.py b/src/lib/pybitcointools/setup.py index 1cd6d1f83..e01a9bfc6 100644 --- a/src/lib/pybitcointools/setup.py +++ b/src/lib/pybitcointools/setup.py @@ -5,7 +5,7 @@ from distutils.core import setup setup(name='bitcoin', - version='1.1.28', + version='1.1.42', description='Python Bitcoin Tools', author='Vitalik Buterin', author_email='vbuterin@gmail.com', @@ -13,5 +13,5 @@ packages=['bitcoin'], scripts=['pybtctool'], include_package_data=True, - 
data_files=[("", ["LICENSE"])], + data_files=[("", ["LICENSE"]), ("bitcoin", ["bitcoin/english.txt"])], ) diff --git a/src/lib/pybitcointools/test.py b/src/lib/pybitcointools/test.py index 2cf415d80..8fb8bdc1e 100644 --- a/src/lib/pybitcointools/test.py +++ b/src/lib/pybitcointools/test.py @@ -99,57 +99,19 @@ def test_all(self): ) -class TestElectrumSignVerify(unittest.TestCase): - """Requires Electrum.""" +class TestRawSignRecover(unittest.TestCase): @classmethod def setUpClass(cls): - cls.wallet = "/tmp/tempwallet_" + str(random.randrange(2**40)) - print("Starting wallet tests with: " + cls.wallet) - os.popen('echo "\n\n\n\n\n\n" | electrum -w %s create' % cls.wallet).read() - cls.seed = str(json.loads(os.popen("electrum -w %s getseed" % cls.wallet).read())['seed']) - cls.addies = json.loads(os.popen("electrum -w %s listaddresses" % cls.wallet).read()) + print("Basic signing and recovery tests") - def test_address(self): - for i in range(5): - self.assertEqual( - self.addies[i], - electrum_address(self.seed, i, 0), - "Address does not match! Details:\nseed %s, i: %d" % (self.seed, i) - ) - - def test_sign_verify(self): - print("Electrum-style signing and verification tests, against actual Electrum") - alphabet = "1234567890qwertyuiopasdfghjklzxcvbnm" - for i in range(8): - msg = ''.join([random.choice(alphabet) for i in range(random.randrange(20, 200))]) - addy = random.choice(self.addies) - wif = os.popen('electrum -w %s dumpprivkey %s' % (self.wallet, addy)).readlines()[-2].replace('"', '').strip() - priv = b58check_to_hex(wif) - pub = privtopub(priv) - - sig = os.popen('electrum -w %s signmessage %s %s' % (self.wallet, addy, msg)).readlines()[-1].strip() - self.assertTrue( - ecdsa_verify(msg, sig, pub), - "Verification error. Details:\nmsg: %s\nsig: %s\npriv: %s\naddy: %s\npub: %s" % ( - msg, sig, priv, addy, pub - ) - ) - - rec = ecdsa_recover(msg, sig) - self.assertEqual( - pub, - rec, - "Recovery error. 
Details:\nmsg: %s\nsig: %s\npriv: %s\naddy: %s\noriginal pub: %s, %s\nrecovered pub: %s" % ( - msg, sig, priv, addy, pub, decode_pubkey(pub, 'hex')[1], rec - ) - ) - - mysig = ecdsa_sign(msg, priv) + def test_all(self): + for i in range(20): + k = sha256(str(i)) + s = ecdsa_raw_sign('35' * 32, k) self.assertEqual( - os.popen('electrum -w %s verifymessage %s %s %s' % (self.wallet, addy, mysig, msg)).read().strip(), - "true", - "Electrum verify message does not match" + ecdsa_raw_recover('35' * 32, s), + decode_pubkey(privtopub(k)) ) @@ -305,7 +267,7 @@ def test_all(self): self.assertEqual( left, right, - "Test vector does not match. Details: \n%s\n%s\n\%s" % ( + r"Test vector does not match. Details: \n%s\n%s\n\%s" % ( tv[0], [x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(left)], [x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(right)], @@ -329,7 +291,7 @@ def test_all_testnet(self): self.assertEqual( left, right, - "Test vector does not match. Details:\n%s\n%s\n%s\n\%s" % ( + r"Test vector does not match. Details:\n%s\n%s\n%s\n\%s" % ( left, tv[0], [x.encode('hex') if isinstance(x, str) else x for x in bip32_deserialize(left)], diff --git a/src/lib/pyelliptic/cipher.py b/src/lib/pyelliptic/cipher.py index 4a76a3440..b597cafa2 100644 --- a/src/lib/pyelliptic/cipher.py +++ b/src/lib/pyelliptic/cipher.py @@ -4,7 +4,7 @@ # Copyright (C) 2011 Yann GUIBET # See LICENSE for details. 
-from .openssl import OpenSSL +from pyelliptic.openssl import OpenSSL class Cipher: @@ -77,5 +77,8 @@ def ciphering(self, input): return buff + self.final() def __del__(self): - OpenSSL.EVP_CIPHER_CTX_cleanup(self.ctx) + if OpenSSL._hexversion > 0x10100000 and not OpenSSL._libreSSL: + OpenSSL.EVP_CIPHER_CTX_reset(self.ctx) + else: + OpenSSL.EVP_CIPHER_CTX_cleanup(self.ctx) OpenSSL.EVP_CIPHER_CTX_free(self.ctx) diff --git a/src/lib/pyelliptic/ecc.py b/src/lib/pyelliptic/ecc.py index b36806df2..bea645db1 100644 --- a/src/lib/pyelliptic/ecc.py +++ b/src/lib/pyelliptic/ecc.py @@ -5,9 +5,9 @@ # See LICENSE for details. from hashlib import sha512 -from .openssl import OpenSSL -from .cipher import Cipher -from .hash import hmac_sha256, equals +from pyelliptic.openssl import OpenSSL +from pyelliptic.cipher import Cipher +from pyelliptic.hash import hmac_sha256, equals from struct import pack, unpack @@ -223,7 +223,10 @@ def raw_get_ecdh_key(self, pubkey_x, pubkey_y): if (OpenSSL.EC_KEY_set_private_key(own_key, own_priv_key)) == 0: raise Exception("[OpenSSL] EC_KEY_set_private_key FAIL ...") - OpenSSL.ECDH_set_method(own_key, OpenSSL.ECDH_OpenSSL()) + if OpenSSL._hexversion > 0x10100000 and not OpenSSL._libreSSL: + OpenSSL.EC_KEY_set_method(own_key, OpenSSL.EC_KEY_OpenSSL()) + else: + OpenSSL.ECDH_set_method(own_key, OpenSSL.ECDH_OpenSSL()) ecdh_keylen = OpenSSL.ECDH_compute_key( ecdh_keybuffer, 32, other_pub_key, own_key, 0) @@ -299,7 +302,7 @@ def raw_check_key(self, privkey, pubkey_x, pubkey_y, curve=None): if privkey is not None: OpenSSL.BN_free(priv_key) - def sign(self, inputb, digest_alg=OpenSSL.EVP_ecdsa): + def sign(self, inputb, digest_alg=OpenSSL.digest_ecdsa_sha1): """ Sign the input with ECDSA method and returns the signature """ @@ -307,7 +310,10 @@ def sign(self, inputb, digest_alg=OpenSSL.EVP_ecdsa): size = len(inputb) buff = OpenSSL.malloc(inputb, size) digest = OpenSSL.malloc(0, 64) - md_ctx = OpenSSL.EVP_MD_CTX_create() + if OpenSSL._hexversion > 
0x10100000 and not OpenSSL._libreSSL: + md_ctx = OpenSSL.EVP_MD_CTX_new() + else: + md_ctx = OpenSSL.EVP_MD_CTX_create() dgst_len = OpenSSL.pointer(OpenSSL.c_int(0)) siglen = OpenSSL.pointer(OpenSSL.c_int(0)) sig = OpenSSL.malloc(0, 151) @@ -337,7 +343,10 @@ def sign(self, inputb, digest_alg=OpenSSL.EVP_ecdsa): if (OpenSSL.EC_KEY_check_key(key)) == 0: raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...") - OpenSSL.EVP_MD_CTX_init(md_ctx) + if OpenSSL._hexversion > 0x10100000 and not OpenSSL._libreSSL: + OpenSSL.EVP_MD_CTX_new(md_ctx) + else: + OpenSSL.EVP_MD_CTX_init(md_ctx) OpenSSL.EVP_DigestInit_ex(md_ctx, digest_alg(), None) if (OpenSSL.EVP_DigestUpdate(md_ctx, buff, size)) == 0: @@ -356,9 +365,13 @@ def sign(self, inputb, digest_alg=OpenSSL.EVP_ecdsa): OpenSSL.BN_free(pub_key_y) OpenSSL.BN_free(priv_key) OpenSSL.EC_POINT_free(pub_key) - OpenSSL.EVP_MD_CTX_destroy(md_ctx) + if OpenSSL._hexversion > 0x10100000 and not OpenSSL._libreSSL: + OpenSSL.EVP_MD_CTX_free(md_ctx) + else: + OpenSSL.EVP_MD_CTX_destroy(md_ctx) + pass - def verify(self, sig, inputb, digest_alg=OpenSSL.EVP_ecdsa): + def verify(self, sig, inputb, digest_alg=OpenSSL.digest_ecdsa_sha1): """ Verify the signature with the input and the local public key. 
Returns a boolean @@ -368,8 +381,10 @@ def verify(self, sig, inputb, digest_alg=OpenSSL.EVP_ecdsa): binputb = OpenSSL.malloc(inputb, len(inputb)) digest = OpenSSL.malloc(0, 64) dgst_len = OpenSSL.pointer(OpenSSL.c_int(0)) - md_ctx = OpenSSL.EVP_MD_CTX_create() - + if OpenSSL._hexversion > 0x10100000 and not OpenSSL._libreSSL: + md_ctx = OpenSSL.EVP_MD_CTX_new() + else: + md_ctx = OpenSSL.EVP_MD_CTX_create() key = OpenSSL.EC_KEY_new_by_curve_name(self.curve) if key == 0: @@ -390,8 +405,10 @@ def verify(self, sig, inputb, digest_alg=OpenSSL.EVP_ecdsa): raise Exception("[OpenSSL] EC_KEY_set_public_key FAIL ...") if (OpenSSL.EC_KEY_check_key(key)) == 0: raise Exception("[OpenSSL] EC_KEY_check_key FAIL ...") - - OpenSSL.EVP_MD_CTX_init(md_ctx) + if OpenSSL._hexversion > 0x10100000 and not OpenSSL._libreSSL: + OpenSSL.EVP_MD_CTX_new(md_ctx) + else: + OpenSSL.EVP_MD_CTX_init(md_ctx) OpenSSL.EVP_DigestInit_ex(md_ctx, digest_alg(), None) if (OpenSSL.EVP_DigestUpdate(md_ctx, binputb, len(inputb))) == 0: raise Exception("[OpenSSL] EVP_DigestUpdate FAIL ...") @@ -414,7 +431,10 @@ def verify(self, sig, inputb, digest_alg=OpenSSL.EVP_ecdsa): OpenSSL.BN_free(pub_key_x) OpenSSL.BN_free(pub_key_y) OpenSSL.EC_POINT_free(pub_key) - OpenSSL.EVP_MD_CTX_destroy(md_ctx) + if OpenSSL._hexversion > 0x10100000 and not OpenSSL._libreSSL: + OpenSSL.EVP_MD_CTX_free(md_ctx) + else: + OpenSSL.EVP_MD_CTX_destroy(md_ctx) @staticmethod def encrypt(data, pubkey, ephemcurve=None, ciphername='aes-256-cbc'): diff --git a/src/lib/pyelliptic/hash.py b/src/lib/pyelliptic/hash.py index d6a15811e..fb910dd4e 100644 --- a/src/lib/pyelliptic/hash.py +++ b/src/lib/pyelliptic/hash.py @@ -4,7 +4,7 @@ # Copyright (C) 2011 Yann GUIBET # See LICENSE for details. 
-from .openssl import OpenSSL +from pyelliptic.openssl import OpenSSL # For python3 diff --git a/src/lib/pyelliptic/openssl.py b/src/lib/pyelliptic/openssl.py index 129537885..6043f4044 100644 --- a/src/lib/pyelliptic/openssl.py +++ b/src/lib/pyelliptic/openssl.py @@ -8,8 +8,6 @@ import sys import ctypes -import logging -import os OpenSSL = None @@ -33,6 +31,37 @@ def get_blocksize(self): return self._blocksize +def get_version(library): + version = None + hexversion = None + cflags = None + try: + #OpenSSL 1.1 + OPENSSL_VERSION = 0 + OPENSSL_CFLAGS = 1 + library.OpenSSL_version.argtypes = [ctypes.c_int] + library.OpenSSL_version.restype = ctypes.c_char_p + version = library.OpenSSL_version(OPENSSL_VERSION) + cflags = library.OpenSSL_version(OPENSSL_CFLAGS) + library.OpenSSL_version_num.restype = ctypes.c_long + hexversion = library.OpenSSL_version_num() + except AttributeError: + try: + #OpenSSL 1.0 + SSLEAY_VERSION = 0 + SSLEAY_CFLAGS = 2 + library.SSLeay.restype = ctypes.c_long + library.SSLeay_version.restype = ctypes.c_char_p + library.SSLeay_version.argtypes = [ctypes.c_int] + version = library.SSLeay_version(SSLEAY_VERSION) + cflags = library.SSLeay_version(SSLEAY_CFLAGS) + hexversion = library.SSLeay() + except AttributeError: + #raise NotImplementedError('Cannot determine version of this OpenSSL library.') + pass + return (version, hexversion, cflags) + + class _OpenSSL: """ Wrapper for OpenSSL using ctypes @@ -42,6 +71,8 @@ def __init__(self, library): Build the wrapper """ self._lib = ctypes.CDLL(library) + self._version, self._hexversion, self._cflags = get_version(self._lib) + self._libreSSL = self._version.startswith("LibreSSL") self.pointer = ctypes.pointer self.c_int = ctypes.c_int @@ -140,18 +171,27 @@ def __init__(self, library): self.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p, ctypes.c_void_p] - self.ECDH_OpenSSL = self._lib.ECDH_OpenSSL - self._lib.ECDH_OpenSSL.restype = ctypes.c_void_p - self._lib.ECDH_OpenSSL.argtypes = [] + if 
self._hexversion >= 0x10100000 and not self._libreSSL: + self.EC_KEY_OpenSSL = self._lib.EC_KEY_OpenSSL + self._lib.EC_KEY_OpenSSL.restype = ctypes.c_void_p + self._lib.EC_KEY_OpenSSL.argtypes = [] + + self.EC_KEY_set_method = self._lib.EC_KEY_set_method + self._lib.EC_KEY_set_method.restype = ctypes.c_int + self._lib.EC_KEY_set_method.argtypes = [ctypes.c_void_p, ctypes.c_void_p] + else: + self.ECDH_OpenSSL = self._lib.ECDH_OpenSSL + self._lib.ECDH_OpenSSL.restype = ctypes.c_void_p + self._lib.ECDH_OpenSSL.argtypes = [] + + self.ECDH_set_method = self._lib.ECDH_set_method + self._lib.ECDH_set_method.restype = ctypes.c_int + self._lib.ECDH_set_method.argtypes = [ctypes.c_void_p, ctypes.c_void_p] self.BN_CTX_new = self._lib.BN_CTX_new self._lib.BN_CTX_new.restype = ctypes.c_void_p self._lib.BN_CTX_new.argtypes = [] - self.ECDH_set_method = self._lib.ECDH_set_method - self._lib.ECDH_set_method.restype = ctypes.c_int - self._lib.ECDH_set_method.argtypes = [ctypes.c_void_p, ctypes.c_void_p] - self.ECDH_compute_key = self._lib.ECDH_compute_key self.ECDH_compute_key.restype = ctypes.c_int self.ECDH_compute_key.argtypes = [ctypes.c_void_p, @@ -211,9 +251,14 @@ def __init__(self, library): self.EVP_rc4.restype = ctypes.c_void_p self.EVP_rc4.argtypes = [] - self.EVP_CIPHER_CTX_cleanup = self._lib.EVP_CIPHER_CTX_cleanup - self.EVP_CIPHER_CTX_cleanup.restype = ctypes.c_int - self.EVP_CIPHER_CTX_cleanup.argtypes = [ctypes.c_void_p] + if self._hexversion >= 0x10100000 and not self._libreSSL: + self.EVP_CIPHER_CTX_reset = self._lib.EVP_CIPHER_CTX_reset + self.EVP_CIPHER_CTX_reset.restype = ctypes.c_int + self.EVP_CIPHER_CTX_reset.argtypes = [ctypes.c_void_p] + else: + self.EVP_CIPHER_CTX_cleanup = self._lib.EVP_CIPHER_CTX_cleanup + self.EVP_CIPHER_CTX_cleanup.restype = ctypes.c_int + self.EVP_CIPHER_CTX_cleanup.argtypes = [ctypes.c_void_p] self.EVP_CIPHER_CTX_free = self._lib.EVP_CIPHER_CTX_free self.EVP_CIPHER_CTX_free.restype = None @@ -252,10 +297,6 @@ def __init__(self, 
library): self.EVP_DigestFinal_ex.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p] - self.EVP_ecdsa = self._lib.EVP_ecdsa - self._lib.EVP_ecdsa.restype = ctypes.c_void_p - self._lib.EVP_ecdsa.argtypes = [] - self.ECDSA_sign = self._lib.ECDSA_sign self.ECDSA_sign.restype = ctypes.c_int self.ECDSA_sign.argtypes = [ctypes.c_int, ctypes.c_void_p, @@ -266,23 +307,47 @@ def __init__(self, library): self.ECDSA_verify.argtypes = [ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p] - self.EVP_MD_CTX_create = self._lib.EVP_MD_CTX_create - self.EVP_MD_CTX_create.restype = ctypes.c_void_p - self.EVP_MD_CTX_create.argtypes = [] + if self._hexversion >= 0x10100000 and not self._libreSSL: + self.EVP_MD_CTX_new = self._lib.EVP_MD_CTX_new + self.EVP_MD_CTX_new.restype = ctypes.c_void_p + self.EVP_MD_CTX_new.argtypes = [] + + self.EVP_MD_CTX_reset = self._lib.EVP_MD_CTX_reset + self.EVP_MD_CTX_reset.restype = None + self.EVP_MD_CTX_reset.argtypes = [ctypes.c_void_p] - self.EVP_MD_CTX_init = self._lib.EVP_MD_CTX_init - self.EVP_MD_CTX_init.restype = None - self.EVP_MD_CTX_init.argtypes = [ctypes.c_void_p] + self.EVP_MD_CTX_free = self._lib.EVP_MD_CTX_free + self.EVP_MD_CTX_free.restype = None + self.EVP_MD_CTX_free.argtypes = [ctypes.c_void_p] - self.EVP_MD_CTX_destroy = self._lib.EVP_MD_CTX_destroy - self.EVP_MD_CTX_destroy.restype = None - self.EVP_MD_CTX_destroy.argtypes = [ctypes.c_void_p] + self.EVP_sha1 = self._lib.EVP_sha1 + self.EVP_sha1.restype = ctypes.c_void_p + self.EVP_sha1.argtypes = [] + + self.digest_ecdsa_sha1 = self.EVP_sha1 + else: + self.EVP_MD_CTX_create = self._lib.EVP_MD_CTX_create + self.EVP_MD_CTX_create.restype = ctypes.c_void_p + self.EVP_MD_CTX_create.argtypes = [] + + self.EVP_MD_CTX_init = self._lib.EVP_MD_CTX_init + self.EVP_MD_CTX_init.restype = None + self.EVP_MD_CTX_init.argtypes = [ctypes.c_void_p] + + self.EVP_MD_CTX_destroy = self._lib.EVP_MD_CTX_destroy + self.EVP_MD_CTX_destroy.restype = 
None + self.EVP_MD_CTX_destroy.argtypes = [ctypes.c_void_p] + + self.EVP_ecdsa = self._lib.EVP_ecdsa + self._lib.EVP_ecdsa.restype = ctypes.c_void_p + self._lib.EVP_ecdsa.argtypes = [] + + self.digest_ecdsa_sha1 = self.EVP_ecdsa self.RAND_bytes = self._lib.RAND_bytes self.RAND_bytes.restype = ctypes.c_int self.RAND_bytes.argtypes = [ctypes.c_void_p, ctypes.c_int] - self.EVP_sha256 = self._lib.EVP_sha256 self.EVP_sha256.restype = ctypes.c_void_p self.EVP_sha256.argtypes = [] @@ -429,31 +494,11 @@ def malloc(self, data, size): buffer = self.create_string_buffer(size) return buffer - -def openLibrary(): +def loadOpenSSL(): + import logging + import util.SslPatch global OpenSSL - try: - if sys.platform.startswith("win"): - dll_path = "src/lib/opensslVerify/libeay32.dll" - elif sys.platform == "cygwin": - dll_path = "/bin/cygcrypto-1.0.0.dll" - elif os.path.isfile("../lib/libcrypto.so"): # ZeroBundle - dll_path = "../lib/libcrypto.so" - else: - dll_path = "/usr/local/ssl/lib/libcrypto.so" - ssl = _OpenSSL(dll_path) - assert ssl - except Exception, err: - ssl = _OpenSSL(ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or ctypes.util.find_library('libcrypto') or 'libeay32') - OpenSSL = ssl - logging.debug("pyelliptic loaded: %s", ssl._lib) - - -def closeLibrary(): - import _ctypes - if "FreeLibrary" in dir(_ctypes): - _ctypes.FreeLibrary(OpenSSL._lib._handle) - else: - _ctypes.dlclose(OpenSSL._lib._handle) - -openLibrary() + OpenSSL = _OpenSSL(util.SslPatch.getLibraryPath()) + logging.debug("pyelliptic loaded: %s", OpenSSL._lib) + +loadOpenSSL() diff --git a/src/lib/rsa/CHANGELOG.txt b/src/lib/rsa/CHANGELOG.txt deleted file mode 100644 index 2d8f5cf97..000000000 --- a/src/lib/rsa/CHANGELOG.txt +++ /dev/null @@ -1,55 +0,0 @@ -Python-RSA changelog -======================================== - -Version 3.1.1 - in development ----------------------------------------- - -- Fixed doctests for Python 2.7 -- Removed obsolete unittest so all tests run fine on 
Python 3.2 - -Version 3.1 - released 2012-06-17 ----------------------------------------- - -- Big, big credits to Yesudeep Mangalapilly for all the changes listed - below! -- Added ability to generate keys on multiple cores simultaneously. -- Massive speedup -- Partial Python 3.2 compatibility (core functionality works, but - saving or loading keys doesn't, for that the pyasn1 package needs to - be ported to Python 3 first) -- Lots of bug fixes - - - -Version 3.0.1 - released 2011-08-07 ----------------------------------------- - -- Removed unused import of abc module - - -Version 3.0 - released 2011-08-05 ----------------------------------------- - -- Changed the meaning of the keysize to mean the size of ``n`` rather than - the size of both ``p`` and ``q``. This is the common interpretation of - RSA keysize. To get the old behaviour, double the keysize when generating a - new key. - -- Added a lot of doctests - -- Added random-padded encryption and decryption using PKCS#1 version 1.5 - -- Added hash-based signatures and verification using PKCS#1v1.5 - -- Modeling private and public key as real objects rather than dicts. - -- Support for saving and loading keys as PEM and DER files. - -- Ability to extract a public key from a private key (PEM+DER) - - -Version 2.0 ----------------------------------------- - -- Security improvements by Barry Mead. - diff --git a/src/lib/rsa/README.rst b/src/lib/rsa/README.rst deleted file mode 100644 index 9f348636d..000000000 --- a/src/lib/rsa/README.rst +++ /dev/null @@ -1,31 +0,0 @@ -Pure Python RSA implementation -============================== - -`Python-RSA`_ is a pure-Python RSA implementation. It supports -encryption and decryption, signing and verifying signatures, and key -generation according to PKCS#1 version 1.5. It can be used as a Python -library as well as on the commandline. The code was mostly written by -Sybren A. Stüvel. 
- -Documentation can be found at the Python-RSA homepage: -http://stuvel.eu/rsa - -Download and install using:: - - pip install rsa - -or:: - - easy_install rsa - -or download it from the `Python Package Index`_. - -The source code is maintained in a `Mercurial repository`_ and is -licensed under the `Apache License, version 2.0`_ - - -.. _`Python-RSA`: http://stuvel.eu/rsa -.. _`Mercurial repository`: https://bitbucket.org/sybren/python-rsa -.. _`Python Package Index`: http://pypi.python.org/pypi/rsa -.. _`Apache License, version 2.0`: http://www.apache.org/licenses/LICENSE-2.0 - diff --git a/src/lib/rsa/__init__.py b/src/lib/rsa/__init__.py index 99fd66893..c572c06bf 100644 --- a/src/lib/rsa/__init__.py +++ b/src/lib/rsa/__init__.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -22,24 +22,21 @@ cleartext input to prevent repetitions, or other common security improvements. Use with care. -If you want to have a more secure implementation, use the functions from the -``rsa.pkcs1`` module. 
- """ -__author__ = "Sybren Stuvel, Barry Mead and Yesudeep Mangalapilly" -__date__ = "2015-11-05" -__version__ = '3.2.3' - from rsa.key import newkeys, PrivateKey, PublicKey from rsa.pkcs1 import encrypt, decrypt, sign, verify, DecryptionError, \ VerificationError +__author__ = "Sybren Stuvel, Barry Mead and Yesudeep Mangalapilly" +__date__ = "2016-03-29" +__version__ = '3.4.2' + # Do doctest if we're run directly if __name__ == "__main__": import doctest + doctest.testmod() __all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify", 'PublicKey', - 'PrivateKey', 'DecryptionError', 'VerificationError'] - + 'PrivateKey', 'DecryptionError', 'VerificationError'] diff --git a/src/lib/rsa/_compat.py b/src/lib/rsa/_compat.py index 3c4eb81b1..93393d9fc 100644 --- a/src/lib/rsa/_compat.py +++ b/src/lib/rsa/_compat.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -16,7 +16,6 @@ """Python compatibility wrappers.""" - from __future__ import absolute_import import sys @@ -42,15 +41,12 @@ # Else we just assume 64-bit processor keeping up with modern times. MACHINE_WORD_SIZE = 64 - try: # < Python3 unicode_type = unicode - have_python3 = False except NameError: # Python3. unicode_type = str - have_python3 = True # Fake byte literals. if str is unicode_type: @@ -68,14 +64,6 @@ def byte_literal(s): b = byte_literal -try: - # Python 2.6 or higher. - bytes_type = bytes -except NameError: - # Python 2.5 - bytes_type = str - - # To avoid calling b() multiple times in tight loops. ZERO_BYTE = b('\x00') EMPTY_BYTE = b('') @@ -90,7 +78,7 @@ def is_bytes(obj): :returns: ``True`` if ``value`` is a byte string; ``False`` otherwise. 
""" - return isinstance(obj, bytes_type) + return isinstance(obj, bytes) def is_integer(obj): diff --git a/src/lib/rsa/_version133.py b/src/lib/rsa/_version133.py index dff0dda84..ff03b45f5 100644 --- a/src/lib/rsa/_version133.py +++ b/src/lib/rsa/_version133.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,8 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""RSA module -pri = k[1] //Private part of keys d,p,q +"""Deprecated version of the RSA module + +.. deprecated:: 2.0 + + This submodule is deprecated and will be completely removed as of version 4.0. Module for calculating large primes, and RSA encryption, decryption, signing and verification. Includes generating public and private keys. @@ -34,7 +37,11 @@ # NOTE: Python's modulo can return negative numbers. 
We compensate for # this behaviour using the abs() function -from cPickle import dumps, loads +try: + import cPickle as pickle +except ImportError: + import pickle +from pickle import dumps, loads import base64 import math import os @@ -49,6 +56,9 @@ import warnings warnings.warn('Insecure version of the RSA module is imported as %s, be careful' % __name__) +warnings.warn('This submodule is deprecated and will be completely removed as of version 4.0.', + DeprecationWarning) + def gcd(p, q): """Returns the greatest common divisor of p and q @@ -63,12 +73,6 @@ def gcd(p, q): def bytes2int(bytes): """Converts a list of bytes or a string to an integer - - >>> (128*256 + 64)*256 + + 15 - 8405007 - >>> l = [128, 64, 15] - >>> bytes2int(l) - 8405007 """ if not (type(bytes) is types.ListType or type(bytes) is types.StringType): @@ -85,9 +89,6 @@ def bytes2int(bytes): def int2bytes(number): """Converts a number to a string of bytes - - >>> bytes2int(int2bytes(123456789)) - 123456789 """ if not (type(number) is types.LongType or type(number) is types.IntType): @@ -204,11 +205,6 @@ def randomized_primality_testing(n, k): def is_prime(number): """Returns True if the number is prime, and False otherwise. - - >>> is_prime(42) - 0 - >>> is_prime(41) - 1 """ """ @@ -228,14 +224,6 @@ def is_prime(number): def getprime(nbits): """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In other words: nbits is rounded up to whole bytes. - - >>> p = getprime(8) - >>> is_prime(p-1) - 0 - >>> is_prime(p) - 1 - >>> is_prime(p+1) - 0 """ nbytes = int(math.ceil(nbits/8.)) @@ -256,11 +244,6 @@ def getprime(nbits): def are_relatively_prime(a, b): """Returns True if a and b are relatively prime, and False if they are not. 
- - >>> are_relatively_prime(2, 3) - 1 - >>> are_relatively_prime(2, 4) - 0 """ d = gcd(a, b) diff --git a/src/lib/rsa/_version200.py b/src/lib/rsa/_version200.py index 28f36018b..1a1694941 100644 --- a/src/lib/rsa/_version200.py +++ b/src/lib/rsa/_version200.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,14 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""RSA module +"""Deprecated version of the RSA module -Module for calculating large primes, and RSA encryption, decryption, -signing and verification. Includes generating public and private keys. +.. deprecated:: 3.0 -WARNING: this implementation does not use random padding, compression of the -cleartext input to prevent repetitions, or other common security improvements. -Use with care. + This submodule is deprecated and will be completely removed as of version 4.0. """ @@ -39,6 +36,8 @@ # Display a warning that this insecure version is imported. 
import warnings warnings.warn('Insecure version of the RSA module is imported as %s' % __name__) +warnings.warn('This submodule is deprecated and will be completely removed as of version 4.0.', + DeprecationWarning) def bit_size(number): @@ -59,13 +58,7 @@ def gcd(p, q): def bytes2int(bytes): - """Converts a list of bytes or a string to an integer - - >>> (((128 * 256) + 64) * 256) + 15 - 8405007 - >>> l = [128, 64, 15] - >>> bytes2int(l) #same as bytes2int('\x80@\x0f') - 8405007 + r"""Converts a list of bytes or a string to an integer """ if not (type(bytes) is types.ListType or type(bytes) is types.StringType): @@ -99,9 +92,6 @@ def int2bytes(number): def to64(number): """Converts a number in the range of 0 to 63 into base 64 digit character in the range of '0'-'9', 'A'-'Z', 'a'-'z','-','_'. - - >>> to64(10) - 'A' """ if not (type(number) is types.LongType or type(number) is types.IntType): @@ -128,9 +118,6 @@ def to64(number): def from64(number): """Converts an ordinal character value in the range of 0-9,A-Z,a-z,-,_ to a number in the range of 0-63. - - >>> from64(49) - 1 """ if not (type(number) is types.LongType or type(number) is types.IntType): @@ -157,9 +144,6 @@ def from64(number): def int2str64(number): """Converts a number to a string of base64 encoded characters in the range of '0'-'9','A'-'Z,'a'-'z','-','_'. - - >>> int2str64(123456789) - '7MyqL' """ if not (type(number) is types.LongType or type(number) is types.IntType): @@ -177,9 +161,6 @@ def int2str64(number): def str642int(string): """Converts a base64 encoded string into an integer. The chars of this string in in the range '0'-'9','A'-'Z','a'-'z','-','_' - - >>> str642int('7MyqL') - 123456789 """ if not (type(string) is types.ListType or type(string) is types.StringType): @@ -270,11 +251,6 @@ def randomized_primality_testing(n, k): def is_prime(number): """Returns True if the number is prime, and False otherwise. 
- - >>> is_prime(42) - 0 - >>> is_prime(41) - 1 """ if randomized_primality_testing(number, 6): @@ -288,14 +264,6 @@ def is_prime(number): def getprime(nbits): """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In other words: nbits is rounded up to whole bytes. - - >>> p = getprime(8) - >>> is_prime(p-1) - 0 - >>> is_prime(p) - 1 - >>> is_prime(p+1) - 0 """ while True: diff --git a/src/lib/rsa/asn1.py b/src/lib/rsa/asn1.py index 6eb6da53e..b724b8f53 100644 --- a/src/lib/rsa/asn1.py +++ b/src/lib/rsa/asn1.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,38 +14,40 @@ # See the License for the specific language governing permissions and # limitations under the License. -'''ASN.1 definitions. +"""ASN.1 definitions. Not all ASN.1-handling code use these definitions, but when it does, they should be here. 
-''' +""" from pyasn1.type import univ, namedtype, tag + class PubKeyHeader(univ.Sequence): componentType = namedtype.NamedTypes( - namedtype.NamedType('oid', univ.ObjectIdentifier()), - namedtype.NamedType('parameters', univ.Null()), + namedtype.NamedType('oid', univ.ObjectIdentifier()), + namedtype.NamedType('parameters', univ.Null()), ) + class OpenSSLPubKey(univ.Sequence): componentType = namedtype.NamedTypes( - namedtype.NamedType('header', PubKeyHeader()), - - # This little hack (the implicit tag) allows us to get a Bit String as Octet String - namedtype.NamedType('key', univ.OctetString().subtype( - implicitTag=tag.Tag(tagClass=0, tagFormat=0, tagId=3))), + namedtype.NamedType('header', PubKeyHeader()), + + # This little hack (the implicit tag) allows us to get a Bit String as Octet String + namedtype.NamedType('key', univ.OctetString().subtype( + implicitTag=tag.Tag(tagClass=0, tagFormat=0, tagId=3))), ) class AsnPubKey(univ.Sequence): - '''ASN.1 contents of DER encoded public key: - + """ASN.1 contents of DER encoded public key: + RSAPublicKey ::= SEQUENCE { modulus INTEGER, -- n publicExponent INTEGER, -- e - ''' + """ componentType = namedtype.NamedTypes( - namedtype.NamedType('modulus', univ.Integer()), - namedtype.NamedType('publicExponent', univ.Integer()), + namedtype.NamedType('modulus', univ.Integer()), + namedtype.NamedType('publicExponent', univ.Integer()), ) diff --git a/src/lib/rsa/bigfile.py b/src/lib/rsa/bigfile.py index 516cf56b5..3a09716a8 100644 --- a/src/lib/rsa/bigfile.py +++ b/src/lib/rsa/bigfile.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +14,27 @@ # See the License for the specific language governing permissions and # limitations under the License. -'''Large file support +"""Large file support + +.. deprecated:: 3.4 + + The VARBLOCK format is NOT recommended for general use, has been deprecated since + Python-RSA 3.4, and will be removed in a future release. It's vulnerable to a + number of attacks: + + 1. decrypt/encrypt_bigfile() does not implement `Authenticated encryption`_ nor + uses MACs to verify messages before decrypting public key encrypted messages. + + 2. decrypt/encrypt_bigfile() does not use hybrid encryption (it uses plain RSA) + and has no method for chaining, so block reordering is possible. + + See `issue #19 on Github`_ for more information. + +.. _Authenticated encryption: https://en.wikipedia.org/wiki/Authenticated_encryption +.. _issue #19 on Github: https://github.com/sybrenstuvel/python-rsa/issues/13 + + +This module contains functions to: - break a file into smaller blocks, and encrypt them, and store the encrypted blocks in another file. @@ -37,25 +57,40 @@ This file format is called the VARBLOCK format, in line with the varint format used to denote the block sizes. -''' +""" + +import warnings from rsa import key, common, pkcs1, varblock from rsa._compat import byte + def encrypt_bigfile(infile, outfile, pub_key): - '''Encrypts a file, writing it to 'outfile' in VARBLOCK format. - + """Encrypts a file, writing it to 'outfile' in VARBLOCK format. + + .. deprecated:: 3.4 + This function was deprecated in Python-RSA version 3.4 due to security issues + in the VARBLOCK format. See the documentation_ for more information. + + .. 
_documentation: https://stuvel.eu/python-rsa-doc/usage.html#working-with-big-files + :param infile: file-like object to read the cleartext from :param outfile: file-like object to write the crypto in VARBLOCK format to :param pub_key: :py:class:`rsa.PublicKey` to encrypt with - ''' + """ + + warnings.warn("The 'rsa.bigfile.encrypt_bigfile' function was deprecated in Python-RSA version " + "3.4 due to security issues in the VARBLOCK format. See " + "https://stuvel.eu/python-rsa-doc/usage.html#working-with-big-files " + "for more information.", + DeprecationWarning, stacklevel=2) if not isinstance(pub_key, key.PublicKey): raise TypeError('Public key required, but got %r' % pub_key) key_bytes = common.bit_size(pub_key.n) // 8 - blocksize = key_bytes - 11 # keep space for PKCS#1 padding + blocksize = key_bytes - 11 # keep space for PKCS#1 padding # Write the version number to the VARBLOCK file outfile.write(byte(varblock.VARBLOCK_VERSION)) @@ -67,21 +102,34 @@ def encrypt_bigfile(infile, outfile, pub_key): varblock.write_varint(outfile, len(crypto)) outfile.write(crypto) + def decrypt_bigfile(infile, outfile, priv_key): - '''Decrypts an encrypted VARBLOCK file, writing it to 'outfile' - + """Decrypts an encrypted VARBLOCK file, writing it to 'outfile' + + .. deprecated:: 3.4 + This function was deprecated in Python-RSA version 3.4 due to security issues + in the VARBLOCK format. See the documentation_ for more information. + + .. _documentation: https://stuvel.eu/python-rsa-doc/usage.html#working-with-big-files + :param infile: file-like object to read the crypto in VARBLOCK format from :param outfile: file-like object to write the cleartext to :param priv_key: :py:class:`rsa.PrivateKey` to decrypt with - ''' + """ + + warnings.warn("The 'rsa.bigfile.decrypt_bigfile' function was deprecated in Python-RSA version " + "3.4 due to security issues in the VARBLOCK format. 
See " + "https://stuvel.eu/python-rsa-doc/usage.html#working-with-big-files " + "for more information.", + DeprecationWarning, stacklevel=2) if not isinstance(priv_key, key.PrivateKey): raise TypeError('Private key required, but got %r' % priv_key) - + for block in varblock.yield_varblocks(infile): cleartext = pkcs1.decrypt(block, priv_key) outfile.write(cleartext) -__all__ = ['encrypt_bigfile', 'decrypt_bigfile'] +__all__ = ['encrypt_bigfile', 'decrypt_bigfile'] diff --git a/src/lib/rsa/cli.py b/src/lib/rsa/cli.py index 527cc4979..3a218782e 100644 --- a/src/lib/rsa/cli.py +++ b/src/lib/rsa/cli.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,10 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -'''Commandline scripts. +"""Commandline scripts. These scripts are called by the executables defined in setup.py. -''' +""" from __future__ import with_statement, print_function @@ -31,32 +31,33 @@ HASH_METHODS = sorted(rsa.pkcs1.HASH_METHODS.keys()) + def keygen(): - '''Key generator.''' + """Key generator.""" # Parse the CLI options parser = OptionParser(usage='usage: %prog [options] keysize', - description='Generates a new RSA keypair of "keysize" bits.') - + description='Generates a new RSA keypair of "keysize" bits.') + parser.add_option('--pubout', type='string', - help='Output filename for the public key. The public key is ' - 'not saved if this option is not present. You can use ' - 'pyrsa-priv2pub to create the public key file later.') - + help='Output filename for the public key. The public key is ' + 'not saved if this option is not present. 
You can use ' + 'pyrsa-priv2pub to create the public key file later.') + parser.add_option('-o', '--out', type='string', - help='Output filename for the private key. The key is ' - 'written to stdout if this option is not present.') + help='Output filename for the private key. The key is ' + 'written to stdout if this option is not present.') parser.add_option('--form', - help='key format of the private and public keys - default PEM', - choices=('PEM', 'DER'), default='PEM') + help='key format of the private and public keys - default PEM', + choices=('PEM', 'DER'), default='PEM') (cli, cli_args) = parser.parse_args(sys.argv[1:]) if len(cli_args) != 1: parser.print_help() raise SystemExit(1) - + try: keysize = int(cli_args[0]) except ValueError: @@ -67,7 +68,6 @@ def keygen(): print('Generating %i-bit key' % keysize, file=sys.stderr) (pub_key, priv_key) = rsa.newkeys(keysize) - # Save public key if cli.pubout: print('Writing public key to %s' % cli.pubout, file=sys.stderr) @@ -77,7 +77,7 @@ def keygen(): # Save private key data = priv_key.save_pkcs1(format=cli.form) - + if cli.out: print('Writing private key to %s' % cli.out, file=sys.stderr) with open(cli.out, 'wb') as outfile: @@ -88,20 +88,20 @@ def keygen(): class CryptoOperation(object): - '''CLI callable that operates with input, output, and a key.''' + """CLI callable that operates with input, output, and a key.""" __metaclass__ = abc.ABCMeta - keyname = 'public' # or 'private' + keyname = 'public' # or 'private' usage = 'usage: %%prog [options] %(keyname)s_key' description = None operation = 'decrypt' operation_past = 'decrypted' operation_progressive = 'decrypting' input_help = 'Name of the file to %(operation)s. Reads from stdin if ' \ - 'not specified.' + 'not specified.' output_help = 'Name of the file to write the %(operation_past)s file ' \ - 'to. Written to stdout if this option is not present.' + 'to. Written to stdout if this option is not present.' 
expected_cli_args = 1 has_output = True @@ -114,15 +114,15 @@ def __init__(self): @abc.abstractmethod def perform_operation(self, indata, key, cli_args=None): - '''Performs the program's operation. + """Performs the program's operation. Implement in a subclass. :returns: the data to write to the output. - ''' + """ def __call__(self): - '''Runs the program.''' + """Runs the program.""" (cli, cli_args) = self.parse_cli() @@ -137,21 +137,21 @@ def __call__(self): self.write_outfile(outdata, cli.output) def parse_cli(self): - '''Parse the CLI options - + """Parse the CLI options + :returns: (cli_opts, cli_args) - ''' + """ parser = OptionParser(usage=self.usage, description=self.description) - + parser.add_option('-i', '--input', type='string', help=self.input_help) if self.has_output: parser.add_option('-o', '--output', type='string', help=self.output_help) parser.add_option('--keyform', - help='Key format of the %s key - default PEM' % self.keyname, - choices=('PEM', 'DER'), default='PEM') + help='Key format of the %s key - default PEM' % self.keyname, + choices=('PEM', 'DER'), default='PEM') (cli, cli_args) = parser.parse_args(sys.argv[1:]) @@ -159,19 +159,19 @@ def parse_cli(self): parser.print_help() raise SystemExit(1) - return (cli, cli_args) + return cli, cli_args def read_key(self, filename, keyform): - '''Reads a public or private key.''' + """Reads a public or private key.""" print('Reading %s key from %s' % (self.keyname, filename), file=sys.stderr) with open(filename, 'rb') as keyfile: keydata = keyfile.read() return self.key_class.load_pkcs1(keydata, keyform) - + def read_infile(self, inname): - '''Read the input file''' + """Read the input file""" if inname: print('Reading input from %s' % inname, file=sys.stderr) @@ -182,7 +182,7 @@ def read_infile(self, inname): return sys.stdin.read() def write_outfile(self, outdata, outname): - '''Write the output file''' + """Write the output file""" if outname: print('Writing output to %s' % outname, 
file=sys.stderr) @@ -192,47 +192,49 @@ def write_outfile(self, outdata, outname): print('Writing output to stdout', file=sys.stderr) sys.stdout.write(outdata) + class EncryptOperation(CryptoOperation): - '''Encrypts a file.''' + """Encrypts a file.""" keyname = 'public' description = ('Encrypts a file. The file must be shorter than the key ' - 'length in order to be encrypted. For larger files, use the ' - 'pyrsa-encrypt-bigfile command.') + 'length in order to be encrypted. For larger files, use the ' + 'pyrsa-encrypt-bigfile command.') operation = 'encrypt' operation_past = 'encrypted' operation_progressive = 'encrypting' - def perform_operation(self, indata, pub_key, cli_args=None): - '''Encrypts files.''' + """Encrypts files.""" return rsa.encrypt(indata, pub_key) + class DecryptOperation(CryptoOperation): - '''Decrypts a file.''' + """Decrypts a file.""" keyname = 'private' description = ('Decrypts a file. The original file must be shorter than ' - 'the key length in order to have been encrypted. For larger ' - 'files, use the pyrsa-decrypt-bigfile command.') + 'the key length in order to have been encrypted. For larger ' + 'files, use the pyrsa-decrypt-bigfile command.') operation = 'decrypt' operation_past = 'decrypted' operation_progressive = 'decrypting' key_class = rsa.PrivateKey def perform_operation(self, indata, priv_key, cli_args=None): - '''Decrypts files.''' + """Decrypts files.""" return rsa.decrypt(indata, priv_key) + class SignOperation(CryptoOperation): - '''Signs a file.''' + """Signs a file.""" keyname = 'private' usage = 'usage: %%prog [options] private_key hash_method' description = ('Signs a file, outputs the signature. 
Choose the hash ' - 'method from %s' % ', '.join(HASH_METHODS)) + 'method from %s' % ', '.join(HASH_METHODS)) operation = 'sign' operation_past = 'signature' operation_progressive = 'Signing' @@ -240,25 +242,26 @@ class SignOperation(CryptoOperation): expected_cli_args = 2 output_help = ('Name of the file to write the signature to. Written ' - 'to stdout if this option is not present.') + 'to stdout if this option is not present.') def perform_operation(self, indata, priv_key, cli_args): - '''Decrypts files.''' + """Signs files.""" hash_method = cli_args[1] if hash_method not in HASH_METHODS: - raise SystemExit('Invalid hash method, choose one of %s' % - ', '.join(HASH_METHODS)) + raise SystemExit('Invalid hash method, choose one of %s' % + ', '.join(HASH_METHODS)) return rsa.sign(indata, priv_key, hash_method) + class VerifyOperation(CryptoOperation): - '''Verify a signature.''' + """Verify a signature.""" keyname = 'public' usage = 'usage: %%prog [options] public_key signature_file' description = ('Verifies a signature, exits with status 0 upon success, ' - 'prints an error message and exits with status 1 upon error.') + 'prints an error message and exits with status 1 upon error.') operation = 'verify' operation_past = 'verified' operation_progressive = 'Verifying' @@ -267,10 +270,10 @@ class VerifyOperation(CryptoOperation): has_output = False def perform_operation(self, indata, pub_key, cli_args): - '''Decrypts files.''' + """Verifies files.""" signature_file = cli_args[1] - + with open(signature_file, 'rb') as sigfile: signature = sigfile.read() @@ -283,7 +286,7 @@ def perform_operation(self, indata, pub_key, cli_args): class BigfileOperation(CryptoOperation): - '''CryptoOperation that doesn't read the entire file into memory.''' + """CryptoOperation that doesn't read the entire file into memory.""" def __init__(self): CryptoOperation.__init__(self) @@ -291,13 +294,13 @@ def __init__(self): self.file_objects = [] def __del__(self): - '''Closes any open file 
handles.''' + """Closes any open file handles.""" for fobj in self.file_objects: fobj.close() def __call__(self): - '''Runs the program.''' + """Runs the program.""" (cli, cli_args) = self.parse_cli() @@ -312,7 +315,7 @@ def __call__(self): self.perform_operation(infile, outfile, key, cli_args) def get_infile(self, inname): - '''Returns the input file object''' + """Returns the input file object""" if inname: print('Reading input from %s' % inname, file=sys.stderr) @@ -325,7 +328,7 @@ def get_infile(self, inname): return fobj def get_outfile(self, outname): - '''Returns the output file object''' + """Returns the output file object""" if outname: print('Will write output to %s' % outname, file=sys.stderr) @@ -337,35 +340,37 @@ def get_outfile(self, outname): return fobj + class EncryptBigfileOperation(BigfileOperation): - '''Encrypts a file to VARBLOCK format.''' + """Encrypts a file to VARBLOCK format.""" keyname = 'public' description = ('Encrypts a file to an encrypted VARBLOCK file. The file ' - 'can be larger than the key length, but the output file is only ' - 'compatible with Python-RSA.') + 'can be larger than the key length, but the output file is only ' + 'compatible with Python-RSA.') operation = 'encrypt' operation_past = 'encrypted' operation_progressive = 'encrypting' def perform_operation(self, infile, outfile, pub_key, cli_args=None): - '''Encrypts files to VARBLOCK.''' + """Encrypts files to VARBLOCK.""" return rsa.bigfile.encrypt_bigfile(infile, outfile, pub_key) + class DecryptBigfileOperation(BigfileOperation): - '''Decrypts a file in VARBLOCK format.''' + """Decrypts a file in VARBLOCK format.""" keyname = 'private' description = ('Decrypts an encrypted VARBLOCK file that was encrypted ' - 'with pyrsa-encrypt-bigfile') + 'with pyrsa-encrypt-bigfile') operation = 'decrypt' operation_past = 'decrypted' operation_progressive = 'decrypting' key_class = rsa.PrivateKey def perform_operation(self, infile, outfile, priv_key, cli_args=None): - 
'''Decrypts a VARBLOCK file.''' + """Decrypts a VARBLOCK file.""" return rsa.bigfile.decrypt_bigfile(infile, outfile, priv_key) @@ -376,4 +381,3 @@ def perform_operation(self, infile, outfile, priv_key, cli_args=None): verify = VerifyOperation() encrypt_bigfile = EncryptBigfileOperation() decrypt_bigfile = DecryptBigfileOperation() - diff --git a/src/lib/rsa/common.py b/src/lib/rsa/common.py index 39feb8c22..e0743340e 100644 --- a/src/lib/rsa/common.py +++ b/src/lib/rsa/common.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,19 +14,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -'''Common functionality shared by several modules.''' +"""Common functionality shared by several modules.""" def bit_size(num): - ''' + """ Number of bits needed to represent a integer excluding any prefix 0 bits. - As per definition from http://wiki.python.org/moin/BitManipulation and + As per definition from https://wiki.python.org/moin/BitManipulation and to match the behavior of the Python 3 API. Usage:: - + >>> bit_size(1023) 10 >>> bit_size(1024) @@ -40,7 +40,7 @@ def bit_size(num): before the number's bit length is determined. :returns: Returns the number of bits in the integer. 
- ''' + """ if num == 0: return 0 if num < 0: @@ -51,23 +51,23 @@ def bit_size(num): hex_num = "%x" % num return ((len(hex_num) - 1) * 4) + { - '0':0, '1':1, '2':2, '3':2, - '4':3, '5':3, '6':3, '7':3, - '8':4, '9':4, 'a':4, 'b':4, - 'c':4, 'd':4, 'e':4, 'f':4, - }[hex_num[0]] + '0': 0, '1': 1, '2': 2, '3': 2, + '4': 3, '5': 3, '6': 3, '7': 3, + '8': 4, '9': 4, 'a': 4, 'b': 4, + 'c': 4, 'd': 4, 'e': 4, 'f': 4, + }[hex_num[0]] def _bit_size(number): - ''' + """ Returns the number of bits required to hold a specific long number. - ''' + """ if number < 0: raise ValueError('Only nonnegative numbers possible: %s' % number) if number == 0: return 0 - + # This works, even with very large numbers. When using math.log(number, 2), # you'll get rounding errors and it'll fail. bits = 0 @@ -79,9 +79,9 @@ def _bit_size(number): def byte_size(number): - ''' + """ Returns the number of bytes required to hold a specific long number. - + The number of bytes is rounded up. Usage:: @@ -97,17 +97,17 @@ def byte_size(number): An unsigned integer :returns: The number of bytes required to hold a specific long number. 
- ''' + """ quanta, mod = divmod(bit_size(number), 8) if mod or number == 0: quanta += 1 return quanta - #return int(math.ceil(bit_size(number) / 8.0)) + # return int(math.ceil(bit_size(number) / 8.0)) def extended_gcd(a, b): - '''Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb - ''' + """Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb + """ # r = gcd(a,b) i = multiplicitive inverse of a mod b # or j = multiplicitive inverse of b mod a # Neg return values for i or j are made positive mod b or a respectively @@ -116,26 +116,28 @@ def extended_gcd(a, b): y = 1 lx = 1 ly = 0 - oa = a #Remember original a/b to remove - ob = b #negative values from return results + oa = a # Remember original a/b to remove + ob = b # negative values from return results while b != 0: q = a // b - (a, b) = (b, a % b) - (x, lx) = ((lx - (q * x)),x) - (y, ly) = ((ly - (q * y)),y) - if (lx < 0): lx += ob #If neg wrap modulo orignal b - if (ly < 0): ly += oa #If neg wrap modulo orignal a - return (a, lx, ly) #Return only positive values + (a, b) = (b, a % b) + (x, lx) = ((lx - (q * x)), x) + (y, ly) = ((ly - (q * y)), y) + if lx < 0: + lx += ob # If neg wrap modulo orignal b + if ly < 0: + ly += oa # If neg wrap modulo orignal a + return a, lx, ly # Return only positive values def inverse(x, n): - '''Returns x^-1 (mod n) + """Returns x^-1 (mod n) >>> inverse(7, 4) 3 >>> (inverse(143, 4) * 143) % 4 1 - ''' + """ (divider, inv, _) = extended_gcd(x, n) @@ -146,14 +148,14 @@ def inverse(x, n): def crt(a_values, modulo_values): - '''Chinese Remainder Theorem. + """Chinese Remainder Theorem. Calculates x such that x = a[i] (mod m[i]) for each i. 
:param a_values: the a-values of the above equation :param modulo_values: the m-values of the above equation :returns: x such that x = a[i] (mod m[i]) for each i - + >>> crt([2, 3], [3, 5]) 8 @@ -163,10 +165,10 @@ def crt(a_values, modulo_values): >>> crt([2, 3, 0], [7, 11, 15]) 135 - ''' + """ m = 1 - x = 0 + x = 0 for modulo in modulo_values: m *= modulo @@ -179,7 +181,8 @@ def crt(a_values, modulo_values): return x + if __name__ == '__main__': import doctest - doctest.testmod() + doctest.testmod() diff --git a/src/lib/rsa/core.py b/src/lib/rsa/core.py index 90dfee8e5..b3114d9e9 100644 --- a/src/lib/rsa/core.py +++ b/src/lib/rsa/core.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,24 +14,24 @@ # See the License for the specific language governing permissions and # limitations under the License. -'''Core mathematical operations. +"""Core mathematical operations. This is the actual core RSA implementation, which is only defined mathematically on integers. 
-''' - +""" from rsa._compat import is_integer -def assert_int(var, name): +def assert_int(var, name): if is_integer(var): return raise TypeError('%s should be an integer, not %s' % (name, var.__class__)) + def encrypt_int(message, ekey, n): - '''Encrypts a message using encryption key 'ekey', working modulo n''' + """Encrypts a message using encryption key 'ekey', working modulo n""" assert_int(message, 'message') assert_int(ekey, 'ekey') @@ -39,15 +39,15 @@ def encrypt_int(message, ekey, n): if message < 0: raise ValueError('Only non-negative numbers are supported') - + if message > n: raise OverflowError("The message %i is too long for n=%i" % (message, n)) return pow(message, ekey, n) + def decrypt_int(cyphertext, dkey, n): - '''Decrypts a cypher text using the decryption key 'dkey', working - modulo n''' + """Decrypts a cypher text using the decryption key 'dkey', working modulo n""" assert_int(cyphertext, 'cyphertext') assert_int(dkey, 'dkey') @@ -55,4 +55,3 @@ def decrypt_int(cyphertext, dkey, n): message = pow(cyphertext, dkey, n) return message - diff --git a/src/lib/rsa/key.py b/src/lib/rsa/key.py index b6de7b3f3..64600a278 100644 --- a/src/lib/rsa/key.py +++ b/src/lib/rsa/key.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -'''RSA key generation code. +"""RSA key generation code. Create new keys with the newkeys() function. It will give you a PublicKey and a PrivateKey object. @@ -23,70 +23,118 @@ late as possible, such that other functionality will remain working in absence of pyasn1. -''' +.. 
note:: + + Storing public and private keys via the `pickle` module is possible. + However, it is insecure to load a key from an untrusted source. + The pickle module is not secure against erroneous or maliciously + constructed data. Never unpickle data received from an untrusted + or unauthenticated source. + +""" import logging -from rsa._compat import b, bytes_type +from rsa._compat import b import rsa.prime import rsa.pem import rsa.common +import rsa.randnum +import rsa.core log = logging.getLogger(__name__) - +DEFAULT_EXPONENT = 65537 class AbstractKey(object): - '''Abstract superclass for private and public keys.''' + """Abstract superclass for private and public keys.""" + + __slots__ = ('n', 'e') + + def __init__(self, n, e): + self.n = n + self.e = e @classmethod def load_pkcs1(cls, keyfile, format='PEM'): - r'''Loads a key in PKCS#1 DER or PEM format. + """Loads a key in PKCS#1 DER or PEM format. :param keyfile: contents of a DER- or PEM-encoded file that contains the public key. :param format: the format of the file to load; 'PEM' or 'DER' :return: a PublicKey object - - ''' + """ methods = { 'PEM': cls._load_pkcs1_pem, 'DER': cls._load_pkcs1_der, } - if format not in methods: - formats = ', '.join(sorted(methods.keys())) - raise ValueError('Unsupported format: %r, try one of %s' % (format, - formats)) - - method = methods[format] + method = cls._assert_format_exists(format, methods) return method(keyfile) + @staticmethod + def _assert_format_exists(file_format, methods): + """Checks whether the given file format exists in 'methods'. + """ + + try: + return methods[file_format] + except KeyError: + formats = ', '.join(sorted(methods.keys())) + raise ValueError('Unsupported format: %r, try one of %s' % (file_format, + formats)) + def save_pkcs1(self, format='PEM'): - '''Saves the public key in PKCS#1 DER or PEM format. + """Saves the public key in PKCS#1 DER or PEM format. 
:param format: the format to save; 'PEM' or 'DER' :returns: the DER- or PEM-encoded public key. - - ''' + """ methods = { 'PEM': self._save_pkcs1_pem, 'DER': self._save_pkcs1_der, } - if format not in methods: - formats = ', '.join(sorted(methods.keys())) - raise ValueError('Unsupported format: %r, try one of %s' % (format, - formats)) - - method = methods[format] + method = self._assert_format_exists(format, methods) return method() + def blind(self, message, r): + """Performs blinding on the message using random number 'r'. + + :param message: the message, as integer, to blind. + :type message: int + :param r: the random number to blind with. + :type r: int + :return: the blinded message. + :rtype: int + + The blinding is such that message = unblind(decrypt(blind(encrypt(message))). + + See https://en.wikipedia.org/wiki/Blinding_%28cryptography%29 + """ + + return (message * pow(r, self.e, self.n)) % self.n + + def unblind(self, blinded, r): + """Performs blinding on the message using random number 'r'. + + :param blinded: the blinded message, as integer, to unblind. + :param r: the random number to unblind with. + :return: the original message. + + The blinding is such that message = unblind(decrypt(blind(encrypt(message))). + + See https://en.wikipedia.org/wiki/Blinding_%28cryptography%29 + """ + + return (rsa.common.inverse(r, self.n) * blinded) % self.n + + class PublicKey(AbstractKey): - '''Represents a public RSA key. + """Represents a public RSA key. This key is also known as the 'encryption key'. It contains the 'n' and 'e' values. 
@@ -107,20 +155,24 @@ class PublicKey(AbstractKey): >>> key['e'] 3 - ''' + """ __slots__ = ('n', 'e') - def __init__(self, n, e): - self.n = n - self.e = e - def __getitem__(self, key): return getattr(self, key) def __repr__(self): return 'PublicKey(%i, %i)' % (self.n, self.e) + def __getstate__(self): + """Returns the key as tuple for pickling.""" + return self.n, self.e + + def __setstate__(self, state): + """Sets the key from tuple.""" + self.n, self.e = state + def __eq__(self, other): if other is None: return False @@ -135,36 +187,36 @@ def __ne__(self, other): @classmethod def _load_pkcs1_der(cls, keyfile): - r'''Loads a key in PKCS#1 DER format. + """Loads a key in PKCS#1 DER format. - @param keyfile: contents of a DER-encoded file that contains the public + :param keyfile: contents of a DER-encoded file that contains the public key. - @return: a PublicKey object + :return: a PublicKey object First let's construct a DER encoded key: >>> import base64 >>> b64der = 'MAwCBQCNGmYtAgMBAAE=' - >>> der = base64.decodestring(b64der) + >>> der = base64.standard_b64decode(b64der) This loads the file: >>> PublicKey._load_pkcs1_der(der) PublicKey(2367317549, 65537) - ''' + """ from pyasn1.codec.der import decoder from rsa.asn1 import AsnPubKey - + (priv, _) = decoder.decode(keyfile, asn1Spec=AsnPubKey()) return cls(n=int(priv['modulus']), e=int(priv['publicExponent'])) def _save_pkcs1_der(self): - '''Saves the public key in PKCS#1 DER format. + """Saves the public key in PKCS#1 DER format. @returns: the DER-encoded public key. - ''' + """ from pyasn1.codec.der import encoder from rsa.asn1 import AsnPubKey @@ -178,71 +230,70 @@ def _save_pkcs1_der(self): @classmethod def _load_pkcs1_pem(cls, keyfile): - '''Loads a PKCS#1 PEM-encoded public key file. + """Loads a PKCS#1 PEM-encoded public key file. The contents of the file before the "-----BEGIN RSA PUBLIC KEY-----" and after the "-----END RSA PUBLIC KEY-----" lines is ignored. 
- @param keyfile: contents of a PEM-encoded file that contains the public + :param keyfile: contents of a PEM-encoded file that contains the public key. - @return: a PublicKey object - ''' + :return: a PublicKey object + """ der = rsa.pem.load_pem(keyfile, 'RSA PUBLIC KEY') return cls._load_pkcs1_der(der) def _save_pkcs1_pem(self): - '''Saves a PKCS#1 PEM-encoded public key file. + """Saves a PKCS#1 PEM-encoded public key file. - @return: contents of a PEM-encoded file that contains the public key. - ''' + :return: contents of a PEM-encoded file that contains the public key. + """ der = self._save_pkcs1_der() return rsa.pem.save_pem(der, 'RSA PUBLIC KEY') @classmethod def load_pkcs1_openssl_pem(cls, keyfile): - '''Loads a PKCS#1.5 PEM-encoded public key file from OpenSSL. - + """Loads a PKCS#1.5 PEM-encoded public key file from OpenSSL. + These files can be recognised in that they start with BEGIN PUBLIC KEY rather than BEGIN RSA PUBLIC KEY. - + The contents of the file before the "-----BEGIN PUBLIC KEY-----" and after the "-----END PUBLIC KEY-----" lines is ignored. - @param keyfile: contents of a PEM-encoded file that contains the public + :param keyfile: contents of a PEM-encoded file that contains the public key, from OpenSSL. - @return: a PublicKey object - ''' + :return: a PublicKey object + """ der = rsa.pem.load_pem(keyfile, 'PUBLIC KEY') return cls.load_pkcs1_openssl_der(der) @classmethod def load_pkcs1_openssl_der(cls, keyfile): - '''Loads a PKCS#1 DER-encoded public key file from OpenSSL. + """Loads a PKCS#1 DER-encoded public key file from OpenSSL. - @param keyfile: contents of a DER-encoded file that contains the public + :param keyfile: contents of a DER-encoded file that contains the public key, from OpenSSL. 
- @return: a PublicKey object - ''' - + :return: a PublicKey object + + """ + from rsa.asn1 import OpenSSLPubKey from pyasn1.codec.der import decoder from pyasn1.type import univ - + (keyinfo, _) = decoder.decode(keyfile, asn1Spec=OpenSSLPubKey()) - + if keyinfo['header']['oid'] != univ.ObjectIdentifier('1.2.840.113549.1.1.1'): raise TypeError("This is not a DER-encoded OpenSSL-compatible public key") - + return cls._load_pkcs1_der(keyinfo['key'][1:]) - - class PrivateKey(AbstractKey): - '''Represents a private RSA key. + """Represents a private RSA key. This key is also known as the 'decryption key'. It contains the 'n', 'e', 'd', 'p', 'q' and other values. @@ -253,13 +304,13 @@ class PrivateKey(AbstractKey): >>> PrivateKey(3247, 65537, 833, 191, 17) PrivateKey(3247, 65537, 833, 191, 17) - exp1, exp2 and coef don't have to be given, they will be calculated: + exp1, exp2 and coef can be given, but if None or omitted they will be calculated: - >>> pk = PrivateKey(3727264081, 65537, 3349121513, 65063, 57287) + >>> pk = PrivateKey(3727264081, 65537, 3349121513, 65063, 57287, exp2=4) >>> pk.exp1 55063 - >>> pk.exp2 - 10095 + >>> pk.exp2 # this is of course not a correct value, but it is the one we passed. 
+ 4 >>> pk.coef 50797 @@ -273,13 +324,12 @@ class PrivateKey(AbstractKey): >>> pk.coef 8 - ''' + """ __slots__ = ('n', 'e', 'd', 'p', 'q', 'exp1', 'exp2', 'coef') def __init__(self, n, e, d, p, q, exp1=None, exp2=None, coef=None): - self.n = n - self.e = e + AbstractKey.__init__(self, n, e) self.d = d self.p = p self.q = q @@ -290,7 +340,7 @@ def __init__(self, n, e, d, p, q, exp1=None, exp2=None, coef=None): else: self.exp1 = exp1 - if exp1 is None: + if exp2 is None: self.exp2 = int(d % (q - 1)) else: self.exp2 = exp2 @@ -306,6 +356,14 @@ def __getitem__(self, key): def __repr__(self): return 'PrivateKey(%(n)i, %(e)i, %(d)i, %(p)i, %(q)i)' % self + def __getstate__(self): + """Returns the key as tuple for pickling.""" + return self.n, self.e, self.d, self.p, self.q, self.exp1, self.exp2, self.coef + + def __setstate__(self, state): + """Sets the key from tuple.""" + self.n, self.e, self.d, self.p, self.q, self.exp1, self.exp2, self.coef = state + def __eq__(self, other): if other is None: return False @@ -314,37 +372,68 @@ def __eq__(self, other): return False return (self.n == other.n and - self.e == other.e and - self.d == other.d and - self.p == other.p and - self.q == other.q and - self.exp1 == other.exp1 and - self.exp2 == other.exp2 and - self.coef == other.coef) + self.e == other.e and + self.d == other.d and + self.p == other.p and + self.q == other.q and + self.exp1 == other.exp1 and + self.exp2 == other.exp2 and + self.coef == other.coef) def __ne__(self, other): return not (self == other) + def blinded_decrypt(self, encrypted): + """Decrypts the message using blinding to prevent side-channel attacks. 
+ + :param encrypted: the encrypted message + :type encrypted: int + + :returns: the decrypted message + :rtype: int + """ + + blind_r = rsa.randnum.randint(self.n - 1) + blinded = self.blind(encrypted, blind_r) # blind before decrypting + decrypted = rsa.core.decrypt_int(blinded, self.d, self.n) + + return self.unblind(decrypted, blind_r) + + def blinded_encrypt(self, message): + """Encrypts the message using blinding to prevent side-channel attacks. + + :param message: the message to encrypt + :type message: int + + :returns: the encrypted message + :rtype: int + """ + + blind_r = rsa.randnum.randint(self.n - 1) + blinded = self.blind(message, blind_r) # blind before encrypting + encrypted = rsa.core.encrypt_int(blinded, self.d, self.n) + return self.unblind(encrypted, blind_r) + @classmethod def _load_pkcs1_der(cls, keyfile): - r'''Loads a key in PKCS#1 DER format. + """Loads a key in PKCS#1 DER format. - @param keyfile: contents of a DER-encoded file that contains the private + :param keyfile: contents of a DER-encoded file that contains the private key. 
- @return: a PrivateKey object + :return: a PrivateKey object First let's construct a DER encoded key: >>> import base64 >>> b64der = 'MC4CAQACBQDeKYlRAgMBAAECBQDHn4npAgMA/icCAwDfxwIDANcXAgInbwIDAMZt' - >>> der = base64.decodestring(b64der) + >>> der = base64.standard_b64decode(b64der) This loads the file: >>> PrivateKey._load_pkcs1_der(der) PrivateKey(3727264081, 65537, 3349121513, 65063, 57287) - ''' + """ from pyasn1.codec.der import decoder (priv, _) = decoder.decode(keyfile) @@ -352,16 +441,16 @@ def _load_pkcs1_der(cls, keyfile): # ASN.1 contents of DER encoded private key: # # RSAPrivateKey ::= SEQUENCE { - # version Version, + # version Version, # modulus INTEGER, -- n # publicExponent INTEGER, -- e # privateExponent INTEGER, -- d # prime1 INTEGER, -- p # prime2 INTEGER, -- q # exponent1 INTEGER, -- d mod (p-1) - # exponent2 INTEGER, -- d mod (q-1) + # exponent2 INTEGER, -- d mod (q-1) # coefficient INTEGER, -- (inverse of q) mod p - # otherPrimeInfos OtherPrimeInfos OPTIONAL + # otherPrimeInfos OtherPrimeInfos OPTIONAL # } if priv[0] != 0: @@ -371,25 +460,25 @@ def _load_pkcs1_der(cls, keyfile): return cls(*as_ints) def _save_pkcs1_der(self): - '''Saves the private key in PKCS#1 DER format. + """Saves the private key in PKCS#1 DER format. @returns: the DER-encoded private key. 
- ''' + """ from pyasn1.type import univ, namedtype from pyasn1.codec.der import encoder class AsnPrivKey(univ.Sequence): componentType = namedtype.NamedTypes( - namedtype.NamedType('version', univ.Integer()), - namedtype.NamedType('modulus', univ.Integer()), - namedtype.NamedType('publicExponent', univ.Integer()), - namedtype.NamedType('privateExponent', univ.Integer()), - namedtype.NamedType('prime1', univ.Integer()), - namedtype.NamedType('prime2', univ.Integer()), - namedtype.NamedType('exponent1', univ.Integer()), - namedtype.NamedType('exponent2', univ.Integer()), - namedtype.NamedType('coefficient', univ.Integer()), + namedtype.NamedType('version', univ.Integer()), + namedtype.NamedType('modulus', univ.Integer()), + namedtype.NamedType('publicExponent', univ.Integer()), + namedtype.NamedType('privateExponent', univ.Integer()), + namedtype.NamedType('prime1', univ.Integer()), + namedtype.NamedType('prime2', univ.Integer()), + namedtype.NamedType('exponent1', univ.Integer()), + namedtype.NamedType('exponent2', univ.Integer()), + namedtype.NamedType('coefficient', univ.Integer()), ) # Create the ASN object @@ -408,31 +497,32 @@ class AsnPrivKey(univ.Sequence): @classmethod def _load_pkcs1_pem(cls, keyfile): - '''Loads a PKCS#1 PEM-encoded private key file. + """Loads a PKCS#1 PEM-encoded private key file. The contents of the file before the "-----BEGIN RSA PRIVATE KEY-----" and after the "-----END RSA PRIVATE KEY-----" lines is ignored. - @param keyfile: contents of a PEM-encoded file that contains the private + :param keyfile: contents of a PEM-encoded file that contains the private key. - @return: a PrivateKey object - ''' + :return: a PrivateKey object + """ der = rsa.pem.load_pem(keyfile, b('RSA PRIVATE KEY')) return cls._load_pkcs1_der(der) def _save_pkcs1_pem(self): - '''Saves a PKCS#1 PEM-encoded private key file. + """Saves a PKCS#1 PEM-encoded private key file. - @return: contents of a PEM-encoded file that contains the private key. 
- ''' + :return: contents of a PEM-encoded file that contains the private key. + """ der = self._save_pkcs1_der() return rsa.pem.save_pem(der, b('RSA PRIVATE KEY')) + def find_p_q(nbits, getprime_func=rsa.prime.getprime, accurate=True): - ''''Returns a tuple of two different primes of nbits bits each. - + """Returns a tuple of two different primes of nbits bits each. + The resulting p * q has exacty 2 * nbits bits, and the returned p and q will not be equal. @@ -458,9 +548,9 @@ def find_p_q(nbits, getprime_func=rsa.prime.getprime, accurate=True): True >>> common.bit_size(p * q) > 240 True - - ''' - + + """ + total_bits = nbits * 2 # Make sure that p and q aren't too close or the factoring programs can @@ -468,7 +558,7 @@ def find_p_q(nbits, getprime_func=rsa.prime.getprime, accurate=True): shift = nbits // 16 pbits = nbits + shift qbits = nbits - shift - + # Choose the two initial primes log.debug('find_p_q(%i): Finding p', nbits) p = getprime_func(pbits) @@ -476,11 +566,11 @@ def find_p_q(nbits, getprime_func=rsa.prime.getprime, accurate=True): q = getprime_func(qbits) def is_acceptable(p, q): - '''Returns True iff p and q are acceptable: - + """Returns True iff p and q are acceptable: + - p and q differ - (p * q) has the right nr of bits (when accurate=True) - ''' + """ if p == q: return False @@ -505,49 +595,80 @@ def is_acceptable(p, q): # We want p > q as described on # http://www.di-mgt.com.au/rsa_alg.html#crt - return (max(p, q), min(p, q)) + return max(p, q), min(p, q) -def calculate_keys(p, q, nbits): - '''Calculates an encryption and a decryption key given p and q, and - returns them as a tuple (e, d) - ''' +def calculate_keys_custom_exponent(p, q, exponent): + """Calculates an encryption and a decryption key given p, q and an exponent, + and returns them as a tuple (e, d) - phi_n = (p - 1) * (q - 1) + :param p: the first large prime + :param q: the second large prime + :param exponent: the exponent for the key; only change this if you know + what you're 
doing, as the exponent influences how difficult your + private key can be cracked. A very common choice for e is 65537. + :type exponent: int - # A very common choice for e is 65537 - e = 65537 + """ + + phi_n = (p - 1) * (q - 1) try: - d = rsa.common.inverse(e, phi_n) + d = rsa.common.inverse(exponent, phi_n) except ValueError: raise ValueError("e (%d) and phi_n (%d) are not relatively prime" % - (e, phi_n)) + (exponent, phi_n)) - if (e * d) % phi_n != 1: + if (exponent * d) % phi_n != 1: raise ValueError("e (%d) and d (%d) are not mult. inv. modulo " - "phi_n (%d)" % (e, d, phi_n)) + "phi_n (%d)" % (exponent, d, phi_n)) + + return exponent, d + + +def calculate_keys(p, q): + """Calculates an encryption and a decryption key given p and q, and + returns them as a tuple (e, d) - return (e, d) + :param p: the first large prime + :param q: the second large prime -def gen_keys(nbits, getprime_func, accurate=True): - '''Generate RSA keys of nbits bits. Returns (p, q, e, d). + :return: tuple (e, d) with the encryption and decryption exponents. + """ + + return calculate_keys_custom_exponent(p, q, DEFAULT_EXPONENT) + + +def gen_keys(nbits, getprime_func, accurate=True, exponent=DEFAULT_EXPONENT): + """Generate RSA keys of nbits bits. Returns (p, q, e, d). Note: this can take a long time, depending on the key size. - + :param nbits: the total number of bits in ``p`` and ``q``. Both ``p`` and ``q`` will use ``nbits/2`` bits. :param getprime_func: either :py:func:`rsa.prime.getprime` or a function with similar signature. - ''' + :param exponent: the exponent for the key; only change this if you know + what you're doing, as the exponent influences how difficult your + private key can be cracked. A very common choice for e is 65537. + :type exponent: int + """ + + # Regenerate p and q values, until calculate_keys doesn't raise a + # ValueError. 
+ while True: + (p, q) = find_p_q(nbits // 2, getprime_func, accurate) + try: + (e, d) = calculate_keys_custom_exponent(p, q, exponent=exponent) + break + except ValueError: + pass - (p, q) = find_p_q(nbits // 2, getprime_func, accurate) - (e, d) = calculate_keys(p, q, nbits // 2) + return p, q, e, d - return (p, q, e, d) -def newkeys(nbits, accurate=True, poolsize=1): - '''Generates public and private keys, and returns them as (pub, priv). +def newkeys(nbits, accurate=True, poolsize=1, exponent=DEFAULT_EXPONENT): + """Generates public and private keys, and returns them as (pub, priv). The public key is also known as the 'encryption key', and is a :py:class:`rsa.PublicKey` object. The private key is also known as the @@ -560,13 +681,17 @@ def newkeys(nbits, accurate=True, poolsize=1): :param poolsize: the number of processes to use to generate the prime numbers. If set to a number > 1, a parallel algorithm will be used. This requires Python 2.6 or newer. + :param exponent: the exponent for the key; only change this if you know + what you're doing, as the exponent influences how difficult your + private key can be cracked. A very common choice for e is 65537. + :type exponent: int :returns: a tuple (:py:class:`rsa.PublicKey`, :py:class:`rsa.PrivateKey`) The ``poolsize`` parameter was added in *Python-RSA 3.1* and requires Python 2.6 or newer. 
- - ''' + + """ if nbits < 16: raise ValueError('Key too small') @@ -580,11 +705,12 @@ def newkeys(nbits, accurate=True, poolsize=1): import functools getprime_func = functools.partial(parallel.getprime, poolsize=poolsize) - else: getprime_func = rsa.prime.getprime + else: + getprime_func = rsa.prime.getprime # Generate the key components - (p, q, e, d) = gen_keys(nbits, getprime_func) - + (p, q, e, d) = gen_keys(nbits, getprime_func, accurate=accurate, exponent=exponent) + # Create the key objects n = p * q @@ -593,11 +719,12 @@ def newkeys(nbits, accurate=True, poolsize=1): PrivateKey(n, e, d, p, q) ) + __all__ = ['PublicKey', 'PrivateKey', 'newkeys'] if __name__ == '__main__': import doctest - + try: for count in range(100): (failures, tests) = doctest.testmod() diff --git a/src/lib/rsa/parallel.py b/src/lib/rsa/parallel.py index e5034ac70..edc924fd2 100644 --- a/src/lib/rsa/parallel.py +++ b/src/lib/rsa/parallel.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -'''Functions for parallel computation on multiple cores. +"""Functions for parallel computation on multiple cores. Introduced in Python-RSA 3.1. @@ -22,7 +22,7 @@ Requires Python 2.6 or newer. 
-''' +""" from __future__ import print_function @@ -31,20 +31,19 @@ import rsa.prime import rsa.randnum + def _find_prime(nbits, pipe): while True: - integer = rsa.randnum.read_random_int(nbits) - - # Make sure it's odd - integer |= 1 + integer = rsa.randnum.read_random_odd_int(nbits) # Test for primeness if rsa.prime.is_prime(integer): pipe.send(integer) return + def getprime(nbits, poolsize): - '''Returns a prime number that can be stored in 'nbits' bits. + """Returns a prime number that can be stored in 'nbits' bits. Works in multiple threads at the same time. @@ -55,40 +54,47 @@ def getprime(nbits, poolsize): True >>> rsa.prime.is_prime(p+1) False - + >>> from rsa import common >>> common.bit_size(p) == 128 True - - ''' + + """ (pipe_recv, pipe_send) = mp.Pipe(duplex=False) # Create processes - procs = [mp.Process(target=_find_prime, args=(nbits, pipe_send)) - for _ in range(poolsize)] - [p.start() for p in procs] - - result = pipe_recv.recv() - - [p.terminate() for p in procs] + try: + procs = [mp.Process(target=_find_prime, args=(nbits, pipe_send)) + for _ in range(poolsize)] + # Start processes + for p in procs: + p.start() + + result = pipe_recv.recv() + finally: + pipe_recv.close() + pipe_send.close() + + # Terminate processes + for p in procs: + p.terminate() return result + __all__ = ['getprime'] - if __name__ == '__main__': print('Running doctests 1000x or until failure') import doctest - + for count in range(100): (failures, tests) = doctest.testmod() if failures: break - + if count and count % 10 == 0: print('%i times' % count) - - print('Doctests done') + print('Doctests done') diff --git a/src/lib/rsa/pem.py b/src/lib/rsa/pem.py index b1c3a0edb..0f68cb2a1 100644 --- a/src/lib/rsa/pem.py +++ b/src/lib/rsa/pem.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,15 +14,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -'''Functions that load and write PEM-encoded files.''' +"""Functions that load and write PEM-encoded files.""" import base64 from rsa._compat import b, is_bytes + def _markers(pem_marker): - ''' + """ Returns the start and end PEM markers - ''' + """ if is_bytes(pem_marker): pem_marker = pem_marker.decode('utf-8') @@ -30,20 +31,25 @@ def _markers(pem_marker): return (b('-----BEGIN %s-----' % pem_marker), b('-----END %s-----' % pem_marker)) + def load_pem(contents, pem_marker): - '''Loads a PEM file. + """Loads a PEM file. - @param contents: the contents of the file to interpret - @param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY' + :param contents: the contents of the file to interpret + :param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY' when your file has '-----BEGIN RSA PRIVATE KEY-----' and '-----END RSA PRIVATE KEY-----' markers. - @return the base64-decoded content between the start and end markers. + :return: the base64-decoded content between the start and end markers. @raise ValueError: when the content is invalid, for example when the start marker cannot be found. - ''' + """ + + # We want bytes, not text. If it's text, it can be converted to ASCII bytes. + if not is_bytes(contents): + contents = contents.encode('ascii') (pem_start, pem_end) = _markers(pem_marker) @@ -89,26 +95,26 @@ def load_pem(contents, pem_marker): # Base64-decode the contents pem = b('').join(pem_lines) - return base64.decodestring(pem) + return base64.standard_b64decode(pem) def save_pem(contents, pem_marker): - '''Saves a PEM file. 
+ """Saves a PEM file. - @param contents: the contents to encode in PEM format - @param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY' + :param contents: the contents to encode in PEM format + :param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY' when your file has '-----BEGIN RSA PRIVATE KEY-----' and '-----END RSA PRIVATE KEY-----' markers. - @return the base64-encoded content between the start and end markers. + :return: the base64-encoded content between the start and end markers. - ''' + """ (pem_start, pem_end) = _markers(pem_marker) - b64 = base64.encodestring(contents).replace(b('\n'), b('')) + b64 = base64.standard_b64encode(contents).replace(b('\n'), b('')) pem_lines = [pem_start] - + for block_start in range(0, len(b64), 64): block = b64[block_start:block_start + 64] pem_lines.append(block) @@ -117,4 +123,3 @@ def save_pem(contents, pem_marker): pem_lines.append(b('')) return b('\n').join(pem_lines) - diff --git a/src/lib/rsa/pkcs1.py b/src/lib/rsa/pkcs1.py index 15e4cf639..28f0dc544 100644 --- a/src/lib/rsa/pkcs1.py +++ b/src/lib/rsa/pkcs1.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -'''Functions for PKCS#1 version 1.5 encryption and signing +"""Functions for PKCS#1 version 1.5 encryption and signing This module implements certain functionality from PKCS#1 version 1.5. For a very clear example, read http://www.di-mgt.com.au/rsa_alg.html#pkcs1schemes @@ -22,17 +22,17 @@ At least 8 bytes of random padding is used when encrypting a message. 
This makes these methods much more secure than the ones in the ``rsa`` module. -WARNING: this module leaks information when decryption or verification fails. -The exceptions that are raised contain the Python traceback information, which -can be used to deduce where in the process the failure occurred. DO NOT PASS -SUCH INFORMATION to your users. -''' +WARNING: this module leaks information when decryption fails. The exceptions +that are raised contain the Python traceback information, which can be used to +deduce where in the process the failure occurred. DO NOT PASS SUCH INFORMATION +to your users. +""" import hashlib import os from rsa._compat import b -from rsa import common, transform, core, varblock +from rsa import common, transform, core # ASN.1 codes that describe the hash algorithm used. HASH_ASN1 = { @@ -51,133 +51,138 @@ 'SHA-512': hashlib.sha512, } + class CryptoError(Exception): - '''Base class for all exceptions in this module.''' + """Base class for all exceptions in this module.""" + class DecryptionError(CryptoError): - '''Raised when decryption fails.''' + """Raised when decryption fails.""" + class VerificationError(CryptoError): - '''Raised when verification fails.''' - + """Raised when verification fails.""" + + def _pad_for_encryption(message, target_length): - r'''Pads the message for encryption, returning the padded message. - + r"""Pads the message for encryption, returning the padded message. 
+ :return: 00 02 RANDOM_DATA 00 MESSAGE - - >>> block = _pad_for_encryption('hello', 16) + + >>> block = _pad_for_encryption(b'hello', 16) >>> len(block) 16 >>> block[0:2] - '\x00\x02' + b'\x00\x02' >>> block[-6:] - '\x00hello' + b'\x00hello' - ''' + """ max_msglength = target_length - 11 msglength = len(message) - + if msglength > max_msglength: raise OverflowError('%i bytes needed for message, but there is only' - ' space for %i' % (msglength, max_msglength)) - + ' space for %i' % (msglength, max_msglength)) + # Get random padding padding = b('') padding_length = target_length - msglength - 3 - + # We remove 0-bytes, so we'll end up with less padding than we've asked for, # so keep adding data until we're at the correct length. while len(padding) < padding_length: needed_bytes = padding_length - len(padding) - + # Always read at least 8 bytes more than we need, and trim off the rest # after removing the 0-bytes. This increases the chance of getting # enough bytes, especially when needed_bytes is small new_padding = os.urandom(needed_bytes + 5) new_padding = new_padding.replace(b('\x00'), b('')) padding = padding + new_padding[:needed_bytes] - + assert len(padding) == padding_length - + return b('').join([b('\x00\x02'), - padding, - b('\x00'), - message]) - + padding, + b('\x00'), + message]) + def _pad_for_signing(message, target_length): - r'''Pads the message for signing, returning the padded message. - + r"""Pads the message for signing, returning the padded message. + The padding is always a repetition of FF bytes. 
- + :return: 00 01 PADDING 00 MESSAGE - - >>> block = _pad_for_signing('hello', 16) + + >>> block = _pad_for_signing(b'hello', 16) >>> len(block) 16 >>> block[0:2] - '\x00\x01' + b'\x00\x01' >>> block[-6:] - '\x00hello' + b'\x00hello' >>> block[2:-6] - '\xff\xff\xff\xff\xff\xff\xff\xff' - - ''' + b'\xff\xff\xff\xff\xff\xff\xff\xff' + + """ max_msglength = target_length - 11 msglength = len(message) - + if msglength > max_msglength: raise OverflowError('%i bytes needed for message, but there is only' - ' space for %i' % (msglength, max_msglength)) - + ' space for %i' % (msglength, max_msglength)) + padding_length = target_length - msglength - 3 - + return b('').join([b('\x00\x01'), - padding_length * b('\xff'), - b('\x00'), - message]) - - + padding_length * b('\xff'), + b('\x00'), + message]) + + def encrypt(message, pub_key): - '''Encrypts the given message using PKCS#1 v1.5 - + """Encrypts the given message using PKCS#1 v1.5 + :param message: the message to encrypt. Must be a byte string no longer than ``k-11`` bytes, where ``k`` is the number of bytes needed to encode the ``n`` component of the public key. :param pub_key: the :py:class:`rsa.PublicKey` to encrypt with. :raise OverflowError: when the message is too large to fit in the padded block. 
- + >>> from rsa import key, common >>> (pub_key, priv_key) = key.newkeys(256) - >>> message = 'hello' + >>> message = b'hello' >>> crypto = encrypt(message, pub_key) - + The crypto text should be just as long as the public key 'n' component: >>> len(crypto) == common.byte_size(pub_key.n) True - - ''' - + + """ + keylength = common.byte_size(pub_key.n) padded = _pad_for_encryption(message, keylength) - + payload = transform.bytes2int(padded) encrypted = core.encrypt_int(payload, pub_key.e, pub_key.n) block = transform.int2bytes(encrypted, keylength) - + return block + def decrypt(crypto, priv_key): - r'''Decrypts the given message using PKCS#1 v1.5 - + r"""Decrypts the given message using PKCS#1 v1.5 + The decryption is considered 'failed' when the resulting cleartext doesn't start with the bytes 00 02, or when the 00 byte between the padding and the message cannot be found. - + :param crypto: the crypto text as returned by :py:func:`rsa.encrypt` :param priv_key: the :py:class:`rsa.PrivateKey` to decrypt with. :raise DecryptionError: when the decryption fails. No details are given as @@ -190,15 +195,15 @@ def decrypt(crypto, priv_key): It works with strings: - >>> crypto = encrypt('hello', pub_key) + >>> crypto = encrypt(b'hello', pub_key) >>> decrypt(crypto, priv_key) - 'hello' - + b'hello' + And with binary data: - >>> crypto = encrypt('\x00\x00\x00\x00\x01', pub_key) + >>> crypto = encrypt(b'\x00\x00\x00\x00\x01', pub_key) >>> decrypt(crypto, priv_key) - '\x00\x00\x00\x00\x01' + b'\x00\x00\x00\x00\x01' Altering the encrypted information will *likely* cause a :py:class:`rsa.pkcs1.DecryptionError`. If you want to be *sure*, use @@ -213,38 +218,39 @@ def decrypt(crypto, priv_key): It's only a tiny bit of information, but every bit makes cracking the keys easier. 
- >>> crypto = encrypt('hello', pub_key) - >>> crypto = crypto[0:5] + 'X' + crypto[6:] # change a byte + >>> crypto = encrypt(b'hello', pub_key) + >>> crypto = crypto[0:5] + b'X' + crypto[6:] # change a byte >>> decrypt(crypto, priv_key) Traceback (most recent call last): ... - DecryptionError: Decryption failed + rsa.pkcs1.DecryptionError: Decryption failed + + """ - ''' - blocksize = common.byte_size(priv_key.n) encrypted = transform.bytes2int(crypto) - decrypted = core.decrypt_int(encrypted, priv_key.d, priv_key.n) + decrypted = priv_key.blinded_decrypt(encrypted) cleartext = transform.int2bytes(decrypted, blocksize) # If we can't find the cleartext marker, decryption failed. if cleartext[0:2] != b('\x00\x02'): raise DecryptionError('Decryption failed') - + # Find the 00 separator between the padding and the message try: sep_idx = cleartext.index(b('\x00'), 2) except ValueError: raise DecryptionError('Decryption failed') - - return cleartext[sep_idx+1:] - + + return cleartext[sep_idx + 1:] + + def sign(message, priv_key, hash): - '''Signs the message with the private key. + """Signs the message with the private key. Hashes the message, then signs the hash with the given key. This is known as a "detached signature", because the message itself isn't altered. - + :param message: the message to sign. Can be an 8-bit string or a file-like object. If ``message`` has a ``read()`` method, it is assumed to be a file-like object. @@ -255,13 +261,13 @@ def sign(message, priv_key, hash): :raise OverflowError: if the private key is too small to contain the requested hash. 
- ''' + """ # Get the ASN1 code for this hash method if hash not in HASH_ASN1: raise ValueError('Invalid hash method: %s' % hash) asn1code = HASH_ASN1[hash] - + # Calculate the hash hash = _hash(message, hash) @@ -269,18 +275,19 @@ def sign(message, priv_key, hash): cleartext = asn1code + hash keylength = common.byte_size(priv_key.n) padded = _pad_for_signing(cleartext, keylength) - + payload = transform.bytes2int(padded) - encrypted = core.encrypt_int(payload, priv_key.d, priv_key.n) + encrypted = priv_key.blinded_encrypt(payload) block = transform.int2bytes(encrypted, keylength) - + return block + def verify(message, signature, pub_key): - '''Verifies that the signature matches the message. - + """Verifies that the signature matches the message. + The hash method is detected automatically from the signature. - + :param message: the signed message. Can be an 8-bit string or a file-like object. If ``message`` has a ``read()`` method, it is assumed to be a file-like object. @@ -288,59 +295,49 @@ def verify(message, signature, pub_key): :param pub_key: the :py:class:`rsa.PublicKey` of the person signing the message. :raise VerificationError: when the signature doesn't match the message. - .. warning:: + """ - Never display the stack trace of a - :py:class:`rsa.pkcs1.VerificationError` exception. It shows where in - the code the exception occurred, and thus leaks information about the - key. It's only a tiny bit of information, but every bit makes cracking - the keys easier. - - ''' - - blocksize = common.byte_size(pub_key.n) + keylength = common.byte_size(pub_key.n) encrypted = transform.bytes2int(signature) decrypted = core.decrypt_int(encrypted, pub_key.e, pub_key.n) - clearsig = transform.int2bytes(decrypted, blocksize) + clearsig = transform.int2bytes(decrypted, keylength) - # If we can't find the signature marker, verification failed. 
- if clearsig[0:2] != b('\x00\x01'): - raise VerificationError('Verification failed') - - # Find the 00 separator between the padding and the payload - try: - sep_idx = clearsig.index(b('\x00'), 2) - except ValueError: - raise VerificationError('Verification failed') - - # Get the hash and the hash method - (method_name, signature_hash) = _find_method_hash(clearsig[sep_idx+1:]) + # Get the hash method + method_name = _find_method_hash(clearsig) message_hash = _hash(message, method_name) - # Compare the real hash to the hash in the signature - if message_hash != signature_hash: + # Reconstruct the expected padded hash + cleartext = HASH_ASN1[method_name] + message_hash + expected = _pad_for_signing(cleartext, keylength) + + # Compare with the signed one + if expected != clearsig: raise VerificationError('Verification failed') return True + def _hash(message, method_name): - '''Returns the message digest. - + """Returns the message digest. + :param message: the signed message. Can be an 8-bit string or a file-like object. If ``message`` has a ``read()`` method, it is assumed to be a file-like object. :param method_name: the hash method, must be a key of :py:const:`HASH_METHODS`. - - ''' + + """ if method_name not in HASH_METHODS: raise ValueError('Invalid hash method: %s' % method_name) - + method = HASH_METHODS[method_name] hasher = method() if hasattr(message, 'read') and hasattr(message.read, '__call__'): + # Late import to prevent DeprecationWarnings. + from . import varblock + # read as 1K blocks for block in varblock.yield_fixedblocks(message, 1024): hasher.update(block) @@ -351,25 +348,18 @@ def _hash(message, method_name): return hasher.digest() -def _find_method_hash(method_hash): - '''Finds the hash method and the hash itself. - - :param method_hash: ASN1 code for the hash method concatenated with the - hash itself. - - :return: tuple (method, hash) where ``method`` is the used hash method, and - ``hash`` is the hash itself. 
- - :raise VerificationFailed: when the hash method cannot be found +def _find_method_hash(clearsig): + """Finds the hash method. - ''' + :param clearsig: full padded ASN1 and hash. + :return: the used hash method. + :raise VerificationFailed: when the hash method cannot be found + """ for (hashname, asn1code) in HASH_ASN1.items(): - if not method_hash.startswith(asn1code): - continue - - return (hashname, method_hash[len(asn1code):]) - + if asn1code in clearsig: + return hashname + raise VerificationError('Verification failed') @@ -379,13 +369,13 @@ def _find_method_hash(method_hash): if __name__ == '__main__': print('Running doctests 1000x or until failure') import doctest - + for count in range(1000): (failures, tests) = doctest.testmod() if failures: break - + if count and count % 100 == 0: print('%i times' % count) - + print('Doctests done') diff --git a/src/lib/rsa/prime.py b/src/lib/rsa/prime.py index 7422eb1d2..6f23f9dac 100644 --- a/src/lib/rsa/prime.py +++ b/src/lib/rsa/prime.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,102 +14,115 @@ # See the License for the specific language governing permissions and # limitations under the License. -'''Numerical functions related to primes. +"""Numerical functions related to primes. Implementation based on the book Algorithm Design by Michael T. Goodrich and Roberto Tamassia, 2002. 
-''' - -__all__ = [ 'getprime', 'are_relatively_prime'] +""" import rsa.randnum +__all__ = ['getprime', 'are_relatively_prime'] + + def gcd(p, q): - '''Returns the greatest common divisor of p and q + """Returns the greatest common divisor of p and q >>> gcd(48, 180) 12 - ''' + """ while q != 0: - if p < q: (p,q) = (q,p) - (p,q) = (q, p % q) + (p, q) = (q, p % q) return p - - -def jacobi(a, b): - '''Calculates the value of the Jacobi symbol (a/b) where both a and b are - positive integers, and b is odd - - :returns: -1, 0 or 1 - ''' - - assert a > 0 - assert b > 0 - - if a == 0: return 0 - result = 1 - while a > 1: - if a & 1: - if ((a-1)*(b-1) >> 2) & 1: - result = -result - a, b = b % a, a - else: - if (((b * b) - 1) >> 3) & 1: - result = -result - a >>= 1 - if a == 0: return 0 - return result - -def jacobi_witness(x, n): - '''Returns False if n is an Euler pseudo-prime with base x, and - True otherwise. - ''' - j = jacobi(x, n) % n - f = pow(x, n >> 1, n) +def miller_rabin_primality_testing(n, k): + """Calculates whether n is composite (which is always correct) or prime + (which theoretically is incorrect with error probability 4**-k), by + applying Miller-Rabin primality testing. - if j == f: return False - return True + For reference and implementation example, see: + https://en.wikipedia.org/wiki/Miller%E2%80%93Rabin_primality_test -def randomized_primality_testing(n, k): - '''Calculates whether n is composite (which is always correct) or - prime (which is incorrect with error probability 2**-k) + :param n: Integer to be tested for primality. + :type n: int + :param k: Number of rounds (witnesses) of Miller-Rabin testing. + :type k: int + :return: False if the number is composite, True if it's probably prime. + :rtype: bool + """ - Returns False if the number is composite, and True if it's - probably prime. 
- ''' + # prevent potential infinite loop when d = 0 + if n < 2: + return False - # 50% of Jacobi-witnesses can report compositness of non-prime numbers + # Decompose (n - 1) to write it as (2 ** r) * d + # While d is even, divide it by 2 and increase the exponent. + d = n - 1 + r = 0 - # The implemented algorithm using the Jacobi witness function has error - # probability q <= 0.5, according to Goodrich et. al - # - # q = 0.5 - # t = int(math.ceil(k / log(1 / q, 2))) - # So t = k / log(2, 2) = k / 1 = k - # this means we can use range(k) rather than range(t) + while not (d & 1): + r += 1 + d >>= 1 + # Test k witnesses. for _ in range(k): - x = rsa.randnum.randint(n-1) - if jacobi_witness(x, n): return False - + # Generate random integer a, where 2 <= a <= (n - 2) + a = rsa.randnum.randint(n - 4) + 2 + + x = pow(a, d, n) + if x == 1 or x == n - 1: + continue + + for _ in range(r - 1): + x = pow(x, 2, n) + if x == 1: + # n is composite. + return False + if x == n - 1: + # Exit inner loop and continue with next witness. + break + else: + # If loop doesn't break, n is composite. + return False + return True + def is_prime(number): - '''Returns True if the number is prime, and False otherwise. + """Returns True if the number is prime, and False otherwise. + >>> is_prime(2) + True >>> is_prime(42) False >>> is_prime(41) True - ''' + >>> [x for x in range(901, 1000) if is_prime(x)] + [907, 911, 919, 929, 937, 941, 947, 953, 967, 971, 977, 983, 991, 997] + """ + + # Check for small numbers. + if number < 10: + return number in [2, 3, 5, 7] + + # Check for even numbers. 
+ if not (number & 1): + return False + + # According to NIST FIPS 186-4, Appendix C, Table C.3, minimum number of + # rounds of M-R testing, using an error probability of 2 ** (-100), for + # different p, q bitsizes are: + # * p, q bitsize: 512; rounds: 7 + # * p, q bitsize: 1024; rounds: 4 + # * p, q bitsize: 1536; rounds: 3 + # See: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf + return miller_rabin_primality_testing(number, 7) - return randomized_primality_testing(number, 6) def getprime(nbits): - '''Returns a prime number that can be stored in 'nbits' bits. + """Returns a prime number that can be stored in 'nbits' bits. >>> p = getprime(128) >>> is_prime(p-1) @@ -118,49 +131,48 @@ def getprime(nbits): True >>> is_prime(p+1) False - + >>> from rsa import common >>> common.bit_size(p) == 128 True - - ''' + """ - while True: - integer = rsa.randnum.read_random_int(nbits) + assert nbits > 3 # the loop wil hang on too small numbers - # Make sure it's odd - integer |= 1 + while True: + integer = rsa.randnum.read_random_odd_int(nbits) # Test for primeness if is_prime(integer): return integer - # Retry if not prime + # Retry if not prime def are_relatively_prime(a, b): - '''Returns True if a and b are relatively prime, and False if they + """Returns True if a and b are relatively prime, and False if they are not. >>> are_relatively_prime(2, 3) - 1 + True >>> are_relatively_prime(2, 4) - 0 - ''' + False + """ d = gcd(a, b) - return (d == 1) - + return d == 1 + + if __name__ == '__main__': print('Running doctests 1000x or until failure') import doctest - + for count in range(1000): (failures, tests) = doctest.testmod() if failures: break - + if count and count % 100 == 0: print('%i times' % count) - + print('Doctests done') diff --git a/src/lib/rsa/randnum.py b/src/lib/rsa/randnum.py index 0e782744c..3c788a570 100644 --- a/src/lib/rsa/randnum.py +++ b/src/lib/rsa/randnum.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -'''Functions for generating random numbers.''' +"""Functions for generating random numbers.""" # Source inspired by code by Yesudeep Mangalapilly @@ -23,12 +23,13 @@ from rsa import common, transform from rsa._compat import byte + def read_random_bits(nbits): - '''Reads 'nbits' random bits. + """Reads 'nbits' random bits. If nbits isn't a whole number of bytes, an extra byte will be appended with only the lower bits set. - ''' + """ nbytes, rbits = divmod(nbits, 8) @@ -45,8 +46,8 @@ def read_random_bits(nbits): def read_random_int(nbits): - '''Reads a random integer of approximately nbits bits. - ''' + """Reads a random integer of approximately nbits bits. + """ randomdata = read_random_bits(nbits) value = transform.bytes2int(randomdata) @@ -57,13 +58,27 @@ def read_random_int(nbits): return value + +def read_random_odd_int(nbits): + """Reads a random odd integer of approximately nbits bits. + + >>> read_random_odd_int(512) & 1 + 1 + """ + + value = read_random_int(nbits) + + # Make sure it's odd + return value | 1 + + def randint(maxvalue): - '''Returns a random integer x with 1 <= x <= maxvalue - + """Returns a random integer x with 1 <= x <= maxvalue + May take a very long time in specific situations. If maxvalue needs N bits to store, the closer maxvalue is to (2 ** N) - 1, the faster this function is. 
- ''' + """ bit_size = common.bit_size(maxvalue) @@ -81,5 +96,3 @@ def randint(maxvalue): tries += 1 return value - - diff --git a/src/lib/rsa/transform.py b/src/lib/rsa/transform.py index c740b2d27..16061a940 100644 --- a/src/lib/rsa/transform.py +++ b/src/lib/rsa/transform.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,10 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -'''Data transformation functions. +"""Data transformation functions. From bytes to a number, number to bytes, etc. -''' +""" from __future__ import absolute_import @@ -26,6 +26,7 @@ # Using psyco (if available) cuts down the execution time on Python 2.5 # at least by half. import psyco + psyco.full() except ImportError: pass @@ -37,32 +38,32 @@ def bytes2int(raw_bytes): - r'''Converts a list of bytes or an 8-bit string to an integer. + r"""Converts a list of bytes or an 8-bit string to an integer. When using unicode strings, encode it to some encoding like UTF8 first. >>> (((128 * 256) + 64) * 256) + 15 8405007 - >>> bytes2int('\x80@\x0f') + >>> bytes2int(b'\x80@\x0f') 8405007 - ''' + """ return int(binascii.hexlify(raw_bytes), 16) def _int2bytes(number, block_size=None): - r'''Converts a number to a string of bytes. + r"""Converts a number to a string of bytes. 
Usage:: >>> _int2bytes(123456789) - '\x07[\xcd\x15' + b'\x07[\xcd\x15' >>> bytes2int(_int2bytes(123456789)) 123456789 >>> _int2bytes(123456789, 6) - '\x00\x00\x07[\xcd\x15' + b'\x00\x00\x07[\xcd\x15' >>> bytes2int(_int2bytes(123456789, 128)) 123456789 @@ -78,11 +79,12 @@ def _int2bytes(number, block_size=None): @throws OverflowError when block_size is given and the number takes up more bytes than fit into the block. - ''' + """ + # Type checking if not is_integer(number): raise TypeError("You must pass an integer for 'number', not %s" % - number.__class__) + number.__class__) if number < 0: raise ValueError('Negative numbers cannot be used: %i' % number) @@ -99,7 +101,7 @@ def _int2bytes(number, block_size=None): if block_size and block_size > 0: if needed_bytes > block_size: raise OverflowError('Needed %i bytes for number, but block size ' - 'is %i' % (needed_bytes, block_size)) + 'is %i' % (needed_bytes, block_size)) # Convert the number to bytes. while number > 0: @@ -116,7 +118,7 @@ def _int2bytes(number, block_size=None): def bytes_leading(raw_bytes, needle=ZERO_BYTE): - ''' + """ Finds the number of prefixed byte occurrences in the haystack. Useful when you want to deal with padding. @@ -127,7 +129,8 @@ def bytes_leading(raw_bytes, needle=ZERO_BYTE): The byte to count. Default \000. :returns: The number of leading needle bytes. - ''' + """ + leading = 0 # Indexing keeps compatibility between Python 2.x and Python 3.x _byte = needle[0] @@ -140,7 +143,7 @@ def bytes_leading(raw_bytes, needle=ZERO_BYTE): def int2bytes(number, fill_size=None, chunk_size=None, overflow=False): - ''' + """ Convert an unsigned integer to bytes (base-256 representation):: Does not preserve leading zeros if you don't specify a chunk size or @@ -172,7 +175,8 @@ def int2bytes(number, fill_size=None, chunk_size=None, overflow=False): bytes than fit into the block. This requires the ``overflow`` argument to this function to be set to ``False`` otherwise, no error will be raised. 
- ''' + """ + if number < 0: raise ValueError("Number must be an unsigned integer: %d" % number) @@ -202,8 +206,8 @@ def int2bytes(number, fill_size=None, chunk_size=None, overflow=False): if fill_size and fill_size > 0: if not overflow and length > fill_size: raise OverflowError( - "Need %d bytes for number, but fill size is %d" % - (length, fill_size) + "Need %d bytes for number, but fill size is %d" % + (length, fill_size) ) raw_bytes = raw_bytes.rjust(fill_size, ZERO_BYTE) elif chunk_size and chunk_size > 0: @@ -216,5 +220,5 @@ def int2bytes(number, fill_size=None, chunk_size=None, overflow=False): if __name__ == '__main__': import doctest - doctest.testmod() + doctest.testmod() diff --git a/src/lib/rsa/util.py b/src/lib/rsa/util.py index 5bbb70be1..29d5eb121 100644 --- a/src/lib/rsa/util.py +++ b/src/lib/rsa/util.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -'''Utility functions.''' +"""Utility functions.""" from __future__ import with_statement, print_function @@ -23,34 +23,35 @@ import rsa.key + def private_to_public(): - '''Reads a private key and outputs the corresponding public key.''' + """Reads a private key and outputs the corresponding public key.""" # Parse the CLI options parser = OptionParser(usage='usage: %prog [options]', - description='Reads a private key and outputs the ' - 'corresponding public key. Both private and public keys use ' - 'the format described in PKCS#1 v1.5') + description='Reads a private key and outputs the ' + 'corresponding public key. 
Both private and public keys use ' + 'the format described in PKCS#1 v1.5') parser.add_option('-i', '--input', dest='infilename', type='string', - help='Input filename. Reads from stdin if not specified') + help='Input filename. Reads from stdin if not specified') parser.add_option('-o', '--output', dest='outfilename', type='string', - help='Output filename. Writes to stdout of not specified') + help='Output filename. Writes to stdout of not specified') parser.add_option('--inform', dest='inform', - help='key format of input - default PEM', - choices=('PEM', 'DER'), default='PEM') + help='key format of input - default PEM', + choices=('PEM', 'DER'), default='PEM') parser.add_option('--outform', dest='outform', - help='key format of output - default PEM', - choices=('PEM', 'DER'), default='PEM') + help='key format of output - default PEM', + choices=('PEM', 'DER'), default='PEM') (cli, cli_args) = parser.parse_args(sys.argv) # Read the input data if cli.infilename: - print('Reading private key from %s in %s format' % \ - (cli.infilename, cli.inform), file=sys.stderr) + print('Reading private key from %s in %s format' % + (cli.infilename, cli.inform), file=sys.stderr) with open(cli.infilename, 'rb') as infile: in_data = infile.read() else: @@ -60,7 +61,6 @@ def private_to_public(): assert type(in_data) == bytes, type(in_data) - # Take the public fields and create a public key priv_key = rsa.key.PrivateKey.load_pkcs1(in_data, cli.inform) pub_key = rsa.key.PublicKey(priv_key.n, priv_key.e) @@ -69,13 +69,11 @@ def private_to_public(): out_data = pub_key.save_pkcs1(cli.outform) if cli.outfilename: - print('Writing public key to %s in %s format' % \ - (cli.outfilename, cli.outform), file=sys.stderr) + print('Writing public key to %s in %s format' % + (cli.outfilename, cli.outform), file=sys.stderr) with open(cli.outfilename, 'wb') as outfile: outfile.write(out_data) else: print('Writing public key to stdout in %s format' % cli.outform, file=sys.stderr) 
sys.stdout.write(out_data.decode('ascii')) - - diff --git a/src/lib/rsa/varblock.py b/src/lib/rsa/varblock.py index c7d96ae6a..1c8d83904 100644 --- a/src/lib/rsa/varblock.py +++ b/src/lib/rsa/varblock.py @@ -6,7 +6,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -14,7 +14,25 @@ # See the License for the specific language governing permissions and # limitations under the License. -'''VARBLOCK file support +"""VARBLOCK file support + +.. deprecated:: 3.4 + + The VARBLOCK format is NOT recommended for general use, has been deprecated since + Python-RSA 3.4, and will be removed in a future release. It's vulnerable to a + number of attacks: + + 1. decrypt/encrypt_bigfile() does not implement `Authenticated encryption`_ nor + uses MACs to verify messages before decrypting public key encrypted messages. + + 2. decrypt/encrypt_bigfile() does not use hybrid encryption (it uses plain RSA) + and has no method for chaining, so block reordering is possible. + + See `issue #19 on Github`_ for more information. + +.. _Authenticated encryption: https://en.wikipedia.org/wiki/Authenticated_encryption +.. _issue #19 on Github: https://github.com/sybrenstuvel/python-rsa/issues/13 + The VARBLOCK file format is as follows, where || denotes byte concatenation: @@ -31,25 +49,32 @@ This file format is called the VARBLOCK format, in line with the varint format used to denote the block sizes. -''' +""" -from rsa._compat import byte, b +import warnings +from rsa._compat import byte, b ZERO_BYTE = b('\x00') VARBLOCK_VERSION = 1 +warnings.warn("The 'rsa.varblock' module was deprecated in Python-RSA version " + "3.4 due to security issues in the VARBLOCK format. 
See " + "https://github.com/sybrenstuvel/python-rsa/issues/13 for more information.", + DeprecationWarning) + + def read_varint(infile): - '''Reads a varint from the file. + """Reads a varint from the file. When the first byte to be read indicates EOF, (0, 0) is returned. When an EOF occurs when at least one byte has been read, an EOFError exception is raised. - @param infile: the file-like object to read from. It should have a read() + :param infile: the file-like object to read from. It should have a read() method. - @returns (varint, length), the read varint and the number of read bytes. - ''' + :returns: (varint, length), the read varint and the number of read bytes. + """ varint = 0 read_bytes = 0 @@ -58,7 +83,7 @@ def read_varint(infile): char = infile.read(1) if len(char) == 0: if read_bytes == 0: - return (0, 0) + return 0, 0 raise EOFError('EOF while reading varint, value is %i so far' % varint) @@ -68,16 +93,16 @@ def read_varint(infile): read_bytes += 1 if not byte & 0x80: - return (varint, read_bytes) + return varint, read_bytes def write_varint(outfile, value): - '''Writes a varint to a file. + """Writes a varint to a file. - @param outfile: the file-like object to write to. It should have a write() + :param outfile: the file-like object to write to. It should have a write() method. - @returns the number of written bytes. - ''' + :returns: the number of written bytes. + """ # there is a big difference between 'write the value 0' (this case) and # 'there is nothing left to write' (the false-case of the while loop) @@ -89,7 +114,7 @@ def write_varint(outfile, value): written_bytes = 0 while value > 0: to_write = value & 0x7f - value = value >> 7 + value >>= 7 if value > 0: to_write |= 0x80 @@ -101,12 +126,12 @@ def write_varint(outfile, value): def yield_varblocks(infile): - '''Generator, yields each block in the input file. + """Generator, yields each block in the input file. 
- @param infile: file to read, is expected to have the VARBLOCK format as + :param infile: file to read, is expected to have the VARBLOCK format as described in the module's docstring. @yields the contents of each block. - ''' + """ # Check the version number first_char = infile.read(1) @@ -135,11 +160,11 @@ def yield_varblocks(infile): def yield_fixedblocks(infile, blocksize): - '''Generator, yields each block of ``blocksize`` bytes in the input file. + """Generator, yields each block of ``blocksize`` bytes in the input file. :param infile: file to read and separate in blocks. :returns: a generator that yields the contents of each block - ''' + """ while True: block = infile.read(blocksize) @@ -152,4 +177,3 @@ def yield_fixedblocks(infile, blocksize): if read_bytes < blocksize: break - diff --git a/src/lib/subtl/subtl.py b/src/lib/subtl/subtl.py index bf6acad1d..cfbe67709 100644 --- a/src/lib/subtl/subtl.py +++ b/src/lib/subtl/subtl.py @@ -1,6 +1,7 @@ ''' Based on the specification at http://bittorrent.org/beps/bep_0015.html ''' +import binascii import random import struct import time @@ -111,7 +112,7 @@ def poll_once(self): return trans def error(self, message): - print('error: {}'.format(message)) + raise Exception('error: {}'.format(message)) def _send(self, action, payload=None): if not payload: @@ -139,7 +140,7 @@ def _process_response(self, action, payload, trans): elif action == SCRAPE: return self._process_scrape(payload, trans) elif action == ERROR: - return self._proecss_error(payload, trans) + return self._process_error(payload, trans) else: raise UdpTrackerClientException( 'Unknown action response: {}'.format(action)) @@ -201,7 +202,7 @@ def _process_error(self, payload, trans): it here for the possibility. 
''' self.error(payload) - return payload + return False def _generate_peer_id(self): '''http://www.bittorrent.org/beps/bep_0020.html''' diff --git a/src/lib/websocket/ChangeLog b/src/lib/websocket/ChangeLog new file mode 100644 index 000000000..f4483d1e5 --- /dev/null +++ b/src/lib/websocket/ChangeLog @@ -0,0 +1,302 @@ +ChangeLog +============ + +- 0.47.0 + + - Fix socket constructor in _open_socket to use all relevant variables from getaddrinfo. (#383) + - .send() method is very slow (#340) + - cross-platform aync multi-client solution (#375) + - Fix detecting timeouts with SSL in recv (#387) + - Fix WebSocketApp does not poll for data correctly when using SSL (#384) + - Fix Infinite ping/pong timeouts in WebSocketApp.run_forever (#395) + - Added status message when HTTP can't be upgraded to WS (#399) + +- 0.46.0 + + - fixed OSError on windows (#370) + - fixed invalid character (#379) + +- 0.45.0 + + - change license to LGP v2.1 + - allow reuse of WebsocketApp.run_forever (#365) + - Update example for python3 (#360) + - add lock to recv function (#356) + - Parse close frame response correctly when reason present (#354) + - Fix SSL: SSLV3_ALERT_HANDSHAKE_FAILURE on Debian Stretch (#353) + - Wrap socket.gaierror with subclass of WebsocketException (#352) + - Resolve a proxy issue and a connection error (#345) + - Allow empty Host header value (#369) + - Fix undefined variable (#347) + - fix: getting a value with the key 'ca_certs' in sslopt dict (#326) + +- 0.44.0 + + -renames key in sslopt dict (#326) + +- 0.43.0 + + - Unkown kwarg 'ca_cert' when calling ssl wrap_socket() (#326) + - Race condition in WebSocket ping/pong (#327) + +- 0.42.0 + + - Implement simple cookie jar(#292) + - fix: when using pppoe redial will block.(#301) + - Fix insecure_pythons list in setup.py(#304) + - Support WEBSOCKET_CLIENT_CA_BUNDLE being directory(#307) + - WebSocketPayloadException under high traffic and limited network connection(#306) + - Not working --nocert parameter in 
wsdump.py(#315) + - Avoid the app to block on close on certain systems (#320) + - Fix warning is not defined. (#323) + +- 0.41.0 + + - move to repository to https://github.com/websocket-client/websocket-client.git + - _send_ping warning fails due to missing reference in _logging.__all__ (#294) + +- 0.40.0 + - Fix opcode -> op_code (#286) + +- 0.39.0 + - Shuffled around example code (#256) + - _send_ping graceful error handling (#262) + - Allow closing WebSocketApp with status/reason/timeout (#265) + - Support universal wheels (#267) + - _url: Added subnet IP address matching in no_proxy host detection (#270) + - fixed Incorrect encoding in continued messages python3 (#261) + - Pass headers for websocket handshake (#271) + - setup.py: Import `logging` before calling it. (#272) + - Implemented close code 1014 (#273) + - Support CA bundle specified by environment variable (#279) + - Response header values should not be converted to lower case (#264) + +- 0.38.0 + - Exclude port 443 from host http header (#248) + - Cleanup code (#249) + - Modify a code block directive in README (#250) + - fixed ping/pong timeouet (#253) + +- 0.37.0 + - fixed failure that `websocket.create_connection` does not accept `origin` as a parameter (#246 ) + +- 0.36.0 + - added support for using custom connection class (#235) + - use Named logger (#238) + - implement ping/pong timeout (#241) + - Corrects the syntax highlight code (#243) + - fixed failure to join thread before it is started (#242) + +- 0.35.0 + - Prints timings in console (#217) + - use inspect.getfullargspec with Python 3.x (#219) + - Check that exception message is actually a string before trying for substring check (#224) + - Use pre-initialized stream socket (#226) + - fixed TypeError: cafile, capath and cadata cannot be all omitted (#227) + +- 0.34.0 + + - Change import style (#203) + - fix attribute error on the older python. 
(#215) + +- 0.33.0 + + - fixed timeout+ssl error handling bug on python 2.7.10 (#190) + - add proxy support to wsdump.py (#194) + - use wsaccel if available (#193) + - add support for ssl cert chains to support client certs (#195) + - fix string formatting in exception (#196) + - fix typo in README.rst (#197) + - introduce on_data callback to pass data type. (#198) + - WebSocketBadStatusException for Handshake error (#199) + - set close timeout (#192) + - Map dict to headers list (#204) + - support client certification (#207) + - security improvement during handshake (#211) + - improve logging of error from callback (#212) + +- 0.32.0 + + - fix http proxy bug (#189) + +- 0.31.0 + + - Avoid deprecated BaseException.message (#180) + - Add travis builds (#182) + - fixed wsdump to work with piped input (#183) + - fixed output of wsdump.py with python3 (#185) + - add raw mode to wsdump.py (#186) + +- 0.30.0 + + - fixed if client is behind proxy (#169) + - support SNI for python 2.7.9+ and 3.2+ (#172) + - update Host HTTP header by user. (#171) + - fix typo for isEnabledFor (#173) + - can set verify_mode to CERT_NONE when check_hostname is enabled.(#175) + - make websockets iterable (#178) + +- 0.29.0 + + - fixed ssl socket bug + +- 0.28.0 + + - Fix erroneous argument shadowing(#168) + +- 0.27.0 + + - remove unittest2 requirements for python 2.6 (#156) + - fixed subprotocol case during header validation (#158) + - get response status and headers (#160) + - fix out-of-memory due to fragmentation when receiving a very large frame(#163) + - fix error if the payload data is nothing.(#166) + - refactoring. 
+ +- 0.26.0 + + - all WebSocketException provide message string (#152) + - fixed tests fail when not connected to the network (#155) + - Add command line options and handle closed socket to wsdump.py (#153) + +- 0.25.0 + + - fixed for Python 2.6(#151) + +- 0.24.0 + + - Supporting http-basic auth in WebSocketApp (#143) + - fix failure of test.testInternalRecvStrict(#141) + - skip utf8 validation by skip_utf8_validation argument (#137) + - WebsocketProxyException will be raised if we got error about proxy.(#138) + +- 0.23.0 + + - Remove spurious print statement. (#135) + +- 0.22.0 + + - Fix not thread-safe of Websocket.close() (#120) + - Try to get proxy info from environment if not explicitly provided (#124) + - support proxy basic authentication. (#125) + - Fix NoneType exception at WebsocketApp.send (#126) + - not use proxy for localhost (#132) + +- 0.21.0 + + - Check for socket before attempting to close (#115) + - Enable turning off SSL verification in wsdump.py(#116) + - Enable to set subprotocol(#118) + - Better support for Autobahn test suite (http://autobahn.ws/testsuite) (#117) + +- v0.20.0 + + - fix typo. + +- v0.19.0 + + - suppress close event message(#107) + - detect socket connection state(#109) + - support for code and reason in on_close callback(#111) + - continuation frame handling seems suspicious(#113) + +- v0.18.0 + + - allow override of match_hostname usage on ssl (#105) + +- v0.17.0 + + - can't set timeout on a standing websocket connection (#102) + - fixed local variable 'error' referenced before assignment (#102, #98) + +- v0.16.0 + + - lock some method for multithread. (#92) + - disable cert verification. 
(#89) + +- v0.15.0 + + - fixed exception when send a large message (#84) + +- v0.14.1 + + - fixed to work on Python2.6 (#83) + +- v0.14.0 + + - Support python 3(#73) + - Support IPv6(#77) + - Support explicit web proxy(#57) + - specify cookie in connect method option(#82) + +- v0.13.0 + + - MemoryError when receiving large amount of data (~60 MB) at once(ISSUE#59) + - Controlling fragmentation(ISSUE#55) + - server certificate validation(ISSUE#56) + - PyPI tarball is missing test_websocket.py(ISSUE#65) + - Payload length encoding bug(ISSUE#58) + - disable Nagle algorithm by default(ISSUE#41) + - Better event loop in WebSocketApp(ISSUE#63) + - Skip tests that require Internet access by default(ISSUE#66) + +- v0.12.0 + + - support keep alive for WebSocketApp(ISSUE#34) + - fix some SSL bugs(ISSUE#35, #36) + - fix "Timing out leaves websocket library in bad state"(ISSUE#37) + - fix "WebSocketApp.run_with_no_err() silently eats all exceptions"(ISSUE#38) + - WebSocketTimeoutException will be raised for ws/wss timeout(ISSUE#40) + - improve wsdump message(ISSUE#42) + - support fragmentation message(ISSUE#43) + - fix some bugs + +- v0.11.0 + + - Only log non-normal close status(ISSUE#31) + - Fix default Origin isn't URI(ISSUE#32) + - fileno support(ISSUE#33) + +- v0.10.0 + + - allow to set HTTP Header to WebSocketApp(ISSUE#27) + - fix typo in pydoc(ISSUE#28) + - Passing a socketopt flag to the websocket constructor(ISSUE#29) + - websocket.send fails with long data(ISSUE#30) + + +- v0.9.0 + + - allow to set opcode in WebSocketApp.send(ISSUE#25) + - allow to modify Origin(ISSUE#26) + +- v0.8.0 + + - many bug fix + - some performance improvement + +- v0.7.0 + + - fixed problem to read long data.(ISSUE#12) + - fix buffer size boundary violation + +- v0.6.0 + + - Patches: UUID4, self.keep_running, mask_key (ISSUE#11) + - add wsdump.py tool + +- v0.5.2 + + - fix Echo App Demo Throw Error: 'NoneType' object has no attribute 'opcode (ISSUE#10) + +- v0.5.1 + + - delete invalid print 
statement. + +- v0.5.0 + + - support hybi-13 protocol. + +- v0.4.1 + + - fix incorrect custom header order(ISSUE#1) diff --git a/src/lib/websocket/LICENSE b/src/lib/websocket/LICENSE new file mode 100644 index 000000000..342ae716a --- /dev/null +++ b/src/lib/websocket/LICENSE @@ -0,0 +1,135 @@ +GNU LESSER GENERAL PUBLIC LICENSE +Version 2.1, February 1999 + +Copyright (C) 1991, 1999 Free Software Foundation, Inc. +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA +Everyone is permitted to copy and distribute verbatim copies +of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] +Preamble +The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public Licenses are intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. + +This license, the Lesser General Public License, applies to some specially designated software packages--typically libraries--of the Free Software Foundation and other authors who decide to use it. You can use it too, but we suggest you first think carefully about whether this license or the ordinary General Public License is the better strategy to use in any particular case, based on the explanations below. + +When we speak of free software, we are referring to freedom of use, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish); that you receive source code or can get it if you want it; that you can change the software and use pieces of it in new free programs; and that you are informed that you can do these things. 
+ +To protect your rights, we need to make restrictions that forbid distributors to deny you these rights or to ask you to surrender these rights. These restrictions translate to certain responsibilities for you if you distribute copies of the library or if you modify it. + +For example, if you distribute copies of the library, whether gratis or for a fee, you must give the recipients all the rights that we gave you. You must make sure that they, too, receive or can get the source code. If you link other code with the library, you must provide complete object files to the recipients, so that they can relink them with the library after making changes to the library and recompiling it. And you must show them these terms so they know their rights. + +We protect your rights with a two-step method: (1) we copyright the library, and (2) we offer you this license, which gives you legal permission to copy, distribute and/or modify the library. + +To protect each distributor, we want to make it very clear that there is no warranty for the free library. Also, if the library is modified by someone else and passed on, the recipients should know that what they have is not the original version, so that the original author's reputation will not be affected by problems that might be introduced by others. + +Finally, software patents pose a constant threat to the existence of any free program. We wish to make sure that a company cannot effectively restrict the users of a free program by obtaining a restrictive license from a patent holder. Therefore, we insist that any patent license obtained for a version of the library must be consistent with the full freedom of use specified in this license. + +Most GNU software, including some libraries, is covered by the ordinary GNU General Public License. This license, the GNU Lesser General Public License, applies to certain designated libraries, and is quite different from the ordinary General Public License. 
We use this license for certain libraries in order to permit linking those libraries into non-free programs. + +When a program is linked with a library, whether statically or using a shared library, the combination of the two is legally speaking a combined work, a derivative of the original library. The ordinary General Public License therefore permits such linking only if the entire combination fits its criteria of freedom. The Lesser General Public License permits more lax criteria for linking other code with the library. + +We call this license the "Lesser" General Public License because it does Less to protect the user's freedom than the ordinary General Public License. It also provides other free software developers Less of an advantage over competing non-free programs. These disadvantages are the reason we use the ordinary General Public License for many libraries. However, the Lesser license provides advantages in certain special circumstances. + +For example, on rare occasions, there may be a special need to encourage the widest possible use of a certain library, so that it becomes a de-facto standard. To achieve this, non-free programs must be allowed to use the library. A more frequent case is that a free library does the same job as widely used non-free libraries. In this case, there is little to gain by limiting the free library to free software only, so we use the Lesser General Public License. + +In other cases, permission to use a particular library in non-free programs enables a greater number of people to use a large body of free software. For example, permission to use the GNU C Library in non-free programs enables many more people to use the whole GNU operating system, as well as its variant, the GNU/Linux operating system. 
+ +Although the Lesser General Public License is Less protective of the users' freedom, it does ensure that the user of a program that is linked with the Library has the freedom and the wherewithal to run that program using a modified version of the Library. + +The precise terms and conditions for copying, distribution and modification follow. Pay close attention to the difference between a "work based on the library" and a "work that uses the library". The former contains code derived from the library, whereas the latter must be combined with the library in order to run. + +TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION +0. This License Agreement applies to any software library or other program which contains a notice placed by the copyright holder or other authorized party saying it may be distributed under the terms of this Lesser General Public License (also called "this License"). Each licensee is addressed as "you". + +A "library" means a collection of software functions and/or data prepared so as to be conveniently linked with application programs (which use some of those functions and data) to form executables. + +The "Library", below, refers to any such software library or work which has been distributed under these terms. A "work based on the Library" means either the Library or any derivative work under copyright law: that is to say, a work containing the Library or a portion of it, either verbatim or with modifications and/or translated straightforwardly into another language. (Hereinafter, translation is included without limitation in the term "modification".) + +"Source code" for a work means the preferred form of the work for making modifications to it. For a library, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the library. 
+ +Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running a program using the Library is not restricted, and output from such a program is covered only if its contents constitute a work based on the Library (independent of the use of the Library in a tool for writing it). Whether that is true depends on what the Library does and what the program that uses the Library does. + +1. You may copy and distribute verbatim copies of the Library's complete source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and distribute a copy of this License along with the Library. + +You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. + +2. You may modify your copy or copies of the Library or any portion of it, thus forming a work based on the Library, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: + +a) The modified work must itself be a software library. +b) You must cause the files modified to carry prominent notices stating that you changed the files and the date of any change. +c) You must cause the whole of the work to be licensed at no charge to all third parties under the terms of this License. 
+d) If a facility in the modified Library refers to a function or a table of data to be supplied by an application program that uses the facility, other than as an argument passed when the facility is invoked, then you must make a good faith effort to ensure that, in the event an application does not supply such function or table, the facility still operates, and performs whatever part of its purpose remains meaningful. +(For example, a function in a library to compute square roots has a purpose that is entirely well-defined independent of the application. Therefore, Subsection 2d requires that any application-supplied function or table used by this function must be optional: if the application does not supply it, the square root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Library, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Library, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library with the Library (or with a work based on the Library) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. + +3. 
You may opt to apply the terms of the ordinary GNU General Public License instead of this License to a given copy of the Library. To do this, you must alter all the notices that refer to this License, so that they refer to the ordinary GNU General Public License, version 2, instead of to this License. (If a newer version than version 2 of the ordinary GNU General Public License has appeared, then you can specify that version instead if you wish.) Do not make any other change in these notices. + +Once this change is made in a given copy, it is irreversible for that copy, so the ordinary GNU General Public License applies to all subsequent copies and derivative works made from that copy. + +This option is useful when you wish to copy part of the code of the Library into a program that is not a library. + +4. You may copy and distribute the Library (or a portion or derivative of it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange. + +If distribution of object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place satisfies the requirement to distribute the source code, even though third parties are not compelled to copy the source along with the object code. + +5. A program that contains no derivative of any portion of the Library, but is designed to work with the Library by being compiled or linked with it, is called a "work that uses the Library". Such a work, in isolation, is not a derivative work of the Library, and therefore falls outside the scope of this License. 
+ +However, linking a "work that uses the Library" with the Library creates an executable that is a derivative of the Library (because it contains portions of the Library), rather than a "work that uses the library". The executable is therefore covered by this License. Section 6 states terms for distribution of such executables. + +When a "work that uses the Library" uses material from a header file that is part of the Library, the object code for the work may be a derivative work of the Library even though the source code is not. Whether this is true is especially significant if the work can be linked without the Library, or if the work is itself a library. The threshold for this to be true is not precisely defined by law. + +If such an object file uses only numerical parameters, data structure layouts and accessors, and small macros and small inline functions (ten lines or less in length), then the use of the object file is unrestricted, regardless of whether it is legally a derivative work. (Executables containing this object code plus portions of the Library will still fall under Section 6.) + +Otherwise, if the work is a derivative of the Library, you may distribute the object code for the work under the terms of Section 6. Any executables containing that work also fall under Section 6, whether or not they are linked directly with the Library itself. + +6. As an exception to the Sections above, you may also combine or link a "work that uses the Library" with the Library to produce a work containing portions of the Library, and distribute that work under terms of your choice, provided that the terms permit modification of the work for the customer's own use and reverse engineering for debugging such modifications. + +You must give prominent notice with each copy of the work that the Library is used in it and that the Library and its use are covered by this License. You must supply a copy of this License. 
If the work during execution displays copyright notices, you must include the copyright notice for the Library among them, as well as a reference directing the user to the copy of this License. Also, you must do one of these things: + +a) Accompany the work with the complete corresponding machine-readable source code for the Library including whatever changes were used in the work (which must be distributed under Sections 1 and 2 above); and, if the work is an executable linked with the Library, with the complete machine-readable "work that uses the Library", as object code and/or source code, so that the user can modify the Library and then relink to produce a modified executable containing the modified Library. (It is understood that the user who changes the contents of definitions files in the Library will not necessarily be able to recompile the application to use the modified definitions.) +b) Use a suitable shared library mechanism for linking with the Library. A suitable mechanism is one that (1) uses at run time a copy of the library already present on the user's computer system, rather than copying library functions into the executable, and (2) will operate properly with a modified version of the library, if the user installs one, as long as the modified version is interface-compatible with the version that the work was made with. +c) Accompany the work with a written offer, valid for at least three years, to give the same user the materials specified in Subsection 6a, above, for a charge no more than the cost of performing this distribution. +d) If distribution of the work is made by offering access to copy from a designated place, offer equivalent access to copy the above specified materials from the same place. +e) Verify that the user has already received a copy of these materials or that you have already sent this user a copy. 
+For an executable, the required form of the "work that uses the Library" must include any data and utility programs needed for reproducing the executable from it. However, as a special exception, the materials to be distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. + +It may happen that this requirement contradicts the license restrictions of other proprietary libraries that do not normally accompany the operating system. Such a contradiction means you cannot use both them and the Library together in an executable that you distribute. + +7. You may place library facilities that are a work based on the Library side-by-side in a single library together with other library facilities not covered by this License, and distribute such a combined library, provided that the separate distribution of the work based on the Library and of the other library facilities is otherwise permitted, and provided that you do these two things: + +a) Accompany the combined library with a copy of the same work based on the Library, uncombined with any other library facilities. This must be distributed under the terms of the Sections above. +b) Give prominent notice with the combined library of the fact that part of it is a work based on the Library, and explaining where to find the accompanying uncombined form of the same work. +8. You may not copy, modify, sublicense, link with, or distribute the Library except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense, link with, or distribute the Library is void, and will automatically terminate your rights under this License. 
However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. + +9. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Library or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Library (or any work based on the Library), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Library or works based on it. + +10. Each time you redistribute the Library (or any work based on the Library), the recipient automatically receives a license from the original licensor to copy, distribute, link with or modify the Library subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties with this License. + +11. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Library at all. For example, if a patent license would not permit royalty-free redistribution of the Library by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Library. 
+ +If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply, and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. + +This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. + +12. If the distribution and/or use of the Library is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Library under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. + +13. The Free Software Foundation may publish revised and/or new versions of the Lesser General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. 
If the Library specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Library does not specify a license version number, you may choose any version ever published by the Free Software Foundation. + +14. If you wish to incorporate parts of the Library into other free programs whose distribution conditions are incompatible with these, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. + +NO WARRANTY + +15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + +16. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. diff --git a/src/lib/websocket/README.rst b/src/lib/websocket/README.rst new file mode 100644 index 000000000..7c3039836 --- /dev/null +++ b/src/lib/websocket/README.rst @@ -0,0 +1,268 @@ +================= +websocket-client +================= + +websocket-client module is WebSocket client for python. This provide the low level APIs for WebSocket. All APIs are the synchronous functions. + +websocket-client supports only hybi-13. + + +License +============ + + - LGPL + +Installation +============= + +This module is tested on Python 2.7 and Python 3.x. + +Type "python setup.py install" or "pip install websocket-client" to install. + +.. CAUTION:: + + from v0.16.0, we can install by "pip install websocket-client" for python 3. + +This module depend on + + - six + - backports.ssl_match_hostname for Python 2.x + +performance +------------------ + + "send" method is too slow on pure python. If you want to get better performace, please install numpy or wsaccel. +You can get the best performance from numpy. + + +How about Python 3 +=========================== + +Now, we support python 3 on single source code from version 0.14.0. Thanks, @battlemidget and @ralphbean. + +HTTP Proxy +============= + +Support websocket access via http proxy. +The proxy server must allow "CONNECT" method to websocket port. +Default squid setting is "ALLOWED TO CONNECT ONLY HTTPS PORT". 
+ +Current implementation of websocket-client is using "CONNECT" method via proxy. + + +example + +.. code:: python + + import websocket + ws = websocket.WebSocket() + ws.connect("ws://example.com/websocket", http_proxy_host="proxy_host_name", http_proxy_port=3128) + + + + +Examples +======== + +Long-lived connection +--------------------- +This example is similar to how WebSocket code looks in browsers using JavaScript. + +.. code:: python + + import websocket + try: + import thread + except ImportError: + import _thread as thread + import time + + def on_message(ws, message): + print(message) + + def on_error(ws, error): + print(error) + + def on_close(ws): + print("### closed ###") + + def on_open(ws): + def run(*args): + for i in range(3): + time.sleep(1) + ws.send("Hello %d" % i) + time.sleep(1) + ws.close() + print("thread terminating...") + thread.start_new_thread(run, ()) + + + if __name__ == "__main__": + websocket.enableTrace(True) + ws = websocket.WebSocketApp("ws://echo.websocket.org/", + on_message = on_message, + on_error = on_error, + on_close = on_close) + ws.on_open = on_open + ws.run_forever() + + +Short-lived one-off send-receive +-------------------------------- +This is if you want to communicate a short message and disconnect immediately when done. + +.. code:: python + + from websocket import create_connection + ws = create_connection("ws://echo.websocket.org/") + print("Sending 'Hello, World'...") + ws.send("Hello, World") + print("Sent") + print("Receiving...") + result = ws.recv() + print("Received '%s'" % result) + ws.close() + +If you want to customize socket options, set sockopt. + +sockopt example + +.. 
code:: python + + from websocket import create_connection + ws = create_connection("ws://echo.websocket.org/", + sockopt=((socket.IPPROTO_TCP, socket.TCP_NODELAY),)) + + +More advanced: Custom class +--------------------------- +You can also write your own class for the connection, if you want to handle the nitty-gritty details yourself. + +.. code:: python + + import socket + from websocket import create_connection, WebSocket + class MyWebSocket(WebSocket): + def recv_frame(self): + frame = super().recv_frame() + print('yay! I got this frame: ', frame) + return frame + + ws = create_connection("ws://echo.websocket.org/", + sockopt=((socket.IPPROTO_TCP, socket.TCP_NODELAY, 1),), class_=MyWebSocket) + + +FAQ +============ + +How to disable ssl cert verification? +---------------------------------------- + +Please set sslopt to {"cert_reqs": ssl.CERT_NONE}. + +WebSocketApp sample + +.. code:: python + + ws = websocket.WebSocketApp("wss://echo.websocket.org") + ws.run_forever(sslopt={"cert_reqs": ssl.CERT_NONE}) + +create_connection sample + +.. code:: python + + ws = websocket.create_connection("wss://echo.websocket.org", + sslopt={"cert_reqs": ssl.CERT_NONE}) + +WebSocket sample + +.. code:: python + + ws = websocket.WebSocket(sslopt={"cert_reqs": ssl.CERT_NONE}) + ws.connect("wss://echo.websocket.org") + + +How to disable hostname verification. +---------------------------------------- + +Please set sslopt to {"check_hostname": False}. +(since v0.18.0) + +WebSocketApp sample + +.. code:: python + + ws = websocket.WebSocketApp("wss://echo.websocket.org") + ws.run_forever(sslopt={"check_hostname": False}) + +create_connection sample + +.. code:: python + + ws = websocket.create_connection("wss://echo.websocket.org", + sslopt={"check_hostname": False}) + +WebSocket sample + +.. code:: python + + ws = websocket.WebSocket(sslopt={"check_hostname": False}) + ws.connect("wss://echo.websocket.org") + + +How to enable `SNI `_? 
+--------------------------------------------------------------------------- + +SNI support is available for Python 2.7.9+ and 3.2+. It will be enabled automatically whenever possible. + + +Sub Protocols. +---------------------------------------- + +The server needs to support sub protocols, please set the subprotocol like this. + + +Subprotocol sample + +.. code:: python + + ws = websocket.create_connection("ws://example.com/websocket", subprotocols=["binary", "base64"]) + + + +wsdump.py +============ + +wsdump.py is simple WebSocket test(debug) tool. + +sample for echo.websocket.org:: + + $ wsdump.py ws://echo.websocket.org/ + Press Ctrl+C to quit + > Hello, WebSocket + < Hello, WebSocket + > How are you? + < How are you? + +Usage +--------- + +usage:: + + wsdump.py [-h] [-v [VERBOSE]] ws_url + +WebSocket Simple Dump Tool + +positional arguments: + ws_url websocket url. ex. ws://echo.websocket.org/ + +optional arguments: + -h, --help show this help message and exit +WebSocketApp + -v VERBOSE, --verbose VERBOSE set verbose mode. If set to 1, show opcode. If set to 2, enable to trace websocket module + +example:: + + $ wsdump.py ws://echo.websocket.org/ + $ wsdump.py ws://echo.websocket.org/ -v + $ wsdump.py ws://echo.websocket.org/ -vv diff --git a/src/lib/websocket/__init__.py b/src/lib/websocket/__init__.py new file mode 100644 index 000000000..b90e65ada --- /dev/null +++ b/src/lib/websocket/__init__.py @@ -0,0 +1,29 @@ +""" +websocket - WebSocket client library for Python + +Copyright (C) 2010 Hiroki Ohtani(liris) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, + Boston, MA 02110-1335 USA + +""" +from ._abnf import * +from ._app import WebSocketApp +from ._core import * +from ._exceptions import * +from ._logging import * +from ._socket import * + +__version__ = "0.47.0" diff --git a/src/lib/websocket/_abnf.py b/src/lib/websocket/_abnf.py new file mode 100644 index 000000000..a0000fa1c --- /dev/null +++ b/src/lib/websocket/_abnf.py @@ -0,0 +1,447 @@ +""" +websocket - WebSocket client library for Python + +Copyright (C) 2010 Hiroki Ohtani(liris) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, + Boston, MA 02110-1335 USA + +""" +import array +import os +import struct + +import six + +from ._exceptions import * +from ._utils import validate_utf8 +from threading import Lock + +try: + if six.PY3: + import numpy + else: + numpy = None +except ImportError: + numpy = None + +try: + # If wsaccel is available we use compiled routines to mask data. 
+ if not numpy: + from wsaccel.xormask import XorMaskerSimple + + def _mask(_m, _d): + return XorMaskerSimple(_m).process(_d) +except ImportError: + # wsaccel is not available, we rely on python implementations. + def _mask(_m, _d): + for i in range(len(_d)): + _d[i] ^= _m[i % 4] + + if six.PY3: + return _d.tobytes() + else: + return _d.tostring() + + +__all__ = [ + 'ABNF', 'continuous_frame', 'frame_buffer', + 'STATUS_NORMAL', + 'STATUS_GOING_AWAY', + 'STATUS_PROTOCOL_ERROR', + 'STATUS_UNSUPPORTED_DATA_TYPE', + 'STATUS_STATUS_NOT_AVAILABLE', + 'STATUS_ABNORMAL_CLOSED', + 'STATUS_INVALID_PAYLOAD', + 'STATUS_POLICY_VIOLATION', + 'STATUS_MESSAGE_TOO_BIG', + 'STATUS_INVALID_EXTENSION', + 'STATUS_UNEXPECTED_CONDITION', + 'STATUS_BAD_GATEWAY', + 'STATUS_TLS_HANDSHAKE_ERROR', +] + +# closing frame status codes. +STATUS_NORMAL = 1000 +STATUS_GOING_AWAY = 1001 +STATUS_PROTOCOL_ERROR = 1002 +STATUS_UNSUPPORTED_DATA_TYPE = 1003 +STATUS_STATUS_NOT_AVAILABLE = 1005 +STATUS_ABNORMAL_CLOSED = 1006 +STATUS_INVALID_PAYLOAD = 1007 +STATUS_POLICY_VIOLATION = 1008 +STATUS_MESSAGE_TOO_BIG = 1009 +STATUS_INVALID_EXTENSION = 1010 +STATUS_UNEXPECTED_CONDITION = 1011 +STATUS_BAD_GATEWAY = 1014 +STATUS_TLS_HANDSHAKE_ERROR = 1015 + +VALID_CLOSE_STATUS = ( + STATUS_NORMAL, + STATUS_GOING_AWAY, + STATUS_PROTOCOL_ERROR, + STATUS_UNSUPPORTED_DATA_TYPE, + STATUS_INVALID_PAYLOAD, + STATUS_POLICY_VIOLATION, + STATUS_MESSAGE_TOO_BIG, + STATUS_INVALID_EXTENSION, + STATUS_UNEXPECTED_CONDITION, + STATUS_BAD_GATEWAY, +) + + +class ABNF(object): + """ + ABNF frame class. + see http://tools.ietf.org/html/rfc5234 + and http://tools.ietf.org/html/rfc6455#section-5.2 + """ + + # operation code values. 
+ OPCODE_CONT = 0x0 + OPCODE_TEXT = 0x1 + OPCODE_BINARY = 0x2 + OPCODE_CLOSE = 0x8 + OPCODE_PING = 0x9 + OPCODE_PONG = 0xa + + # available operation code value tuple + OPCODES = (OPCODE_CONT, OPCODE_TEXT, OPCODE_BINARY, OPCODE_CLOSE, + OPCODE_PING, OPCODE_PONG) + + # opcode human readable string + OPCODE_MAP = { + OPCODE_CONT: "cont", + OPCODE_TEXT: "text", + OPCODE_BINARY: "binary", + OPCODE_CLOSE: "close", + OPCODE_PING: "ping", + OPCODE_PONG: "pong" + } + + # data length threshold. + LENGTH_7 = 0x7e + LENGTH_16 = 1 << 16 + LENGTH_63 = 1 << 63 + + def __init__(self, fin=0, rsv1=0, rsv2=0, rsv3=0, + opcode=OPCODE_TEXT, mask=1, data=""): + """ + Constructor for ABNF. + please check RFC for arguments. + """ + self.fin = fin + self.rsv1 = rsv1 + self.rsv2 = rsv2 + self.rsv3 = rsv3 + self.opcode = opcode + self.mask = mask + if data is None: + data = "" + self.data = data + self.get_mask_key = os.urandom + + def validate(self, skip_utf8_validation=False): + """ + validate the ABNF frame. + skip_utf8_validation: skip utf8 validation. 
+ """ + if self.rsv1 or self.rsv2 or self.rsv3: + raise WebSocketProtocolException("rsv is not implemented, yet") + + if self.opcode not in ABNF.OPCODES: + raise WebSocketProtocolException("Invalid opcode %r", self.opcode) + + if self.opcode == ABNF.OPCODE_PING and not self.fin: + raise WebSocketProtocolException("Invalid ping frame.") + + if self.opcode == ABNF.OPCODE_CLOSE: + l = len(self.data) + if not l: + return + if l == 1 or l >= 126: + raise WebSocketProtocolException("Invalid close frame.") + if l > 2 and not skip_utf8_validation and not validate_utf8(self.data[2:]): + raise WebSocketProtocolException("Invalid close frame.") + + code = 256 * \ + six.byte2int(self.data[0:1]) + six.byte2int(self.data[1:2]) + if not self._is_valid_close_status(code): + raise WebSocketProtocolException("Invalid close opcode.") + + @staticmethod + def _is_valid_close_status(code): + return code in VALID_CLOSE_STATUS or (3000 <= code < 5000) + + def __str__(self): + return "fin=" + str(self.fin) \ + + " opcode=" + str(self.opcode) \ + + " data=" + str(self.data) + + @staticmethod + def create_frame(data, opcode, fin=1): + """ + create frame to send text, binary and other data. + + data: data to send. This is string value(byte array). + if opcode is OPCODE_TEXT and this value is unicode, + data value is converted into unicode string, automatically. + + opcode: operation code. please see OPCODE_XXX. + + fin: fin flag. if set to 0, create continue fragmentation. + """ + if opcode == ABNF.OPCODE_TEXT and isinstance(data, six.text_type): + data = data.encode("utf-8") + # mask must be set if send data from client + return ABNF(fin, 0, 0, 0, opcode, 1, data) + + def format(self): + """ + format this object to string(byte array) to send data to server. 
+ """ + if any(x not in (0, 1) for x in [self.fin, self.rsv1, self.rsv2, self.rsv3]): + raise ValueError("not 0 or 1") + if self.opcode not in ABNF.OPCODES: + raise ValueError("Invalid OPCODE") + length = len(self.data) + if length >= ABNF.LENGTH_63: + raise ValueError("data is too long") + + frame_header = chr(self.fin << 7 + | self.rsv1 << 6 | self.rsv2 << 5 | self.rsv3 << 4 + | self.opcode) + if length < ABNF.LENGTH_7: + frame_header += chr(self.mask << 7 | length) + frame_header = six.b(frame_header) + elif length < ABNF.LENGTH_16: + frame_header += chr(self.mask << 7 | 0x7e) + frame_header = six.b(frame_header) + frame_header += struct.pack("!H", length) + else: + frame_header += chr(self.mask << 7 | 0x7f) + frame_header = six.b(frame_header) + frame_header += struct.pack("!Q", length) + + if not self.mask: + return frame_header + self.data + else: + mask_key = self.get_mask_key(4) + return frame_header + self._get_masked(mask_key) + + def _get_masked(self, mask_key): + s = ABNF.mask(mask_key, self.data) + + if isinstance(mask_key, six.text_type): + mask_key = mask_key.encode('utf-8') + + return mask_key + s + + @staticmethod + def mask(mask_key, data): + """ + mask or unmask data. Just do xor for each byte + + mask_key: 4 byte string(byte). + + data: data to mask/unmask. + """ + if data is None: + data = "" + + if isinstance(mask_key, six.text_type): + mask_key = six.b(mask_key) + + if isinstance(data, six.text_type): + data = six.b(data) + + if numpy: + origlen = len(data) + _mask_key = mask_key[3] << 24 | mask_key[2] << 16 | mask_key[1] << 8 | mask_key[0] + + # We need data to be a multiple of four... 
+ data += bytes(" " * (4 - (len(data) % 4)), "us-ascii") + a = numpy.frombuffer(data, dtype="uint32") + masked = numpy.bitwise_xor(a, [_mask_key]).astype("uint32") + if len(data) > origlen: + return masked.tobytes()[:origlen] + return masked.tobytes() + else: + _m = array.array("B", mask_key) + _d = array.array("B", data) + return _mask(_m, _d) + + +class frame_buffer(object): + _HEADER_MASK_INDEX = 5 + _HEADER_LENGTH_INDEX = 6 + + def __init__(self, recv_fn, skip_utf8_validation): + self.recv = recv_fn + self.skip_utf8_validation = skip_utf8_validation + # Buffers over the packets from the layer beneath until desired amount + # bytes of bytes are received. + self.recv_buffer = [] + self.clear() + self.lock = Lock() + + def clear(self): + self.header = None + self.length = None + self.mask = None + + def has_received_header(self): + return self.header is None + + def recv_header(self): + header = self.recv_strict(2) + b1 = header[0] + + if six.PY2: + b1 = ord(b1) + + fin = b1 >> 7 & 1 + rsv1 = b1 >> 6 & 1 + rsv2 = b1 >> 5 & 1 + rsv3 = b1 >> 4 & 1 + opcode = b1 & 0xf + b2 = header[1] + + if six.PY2: + b2 = ord(b2) + + has_mask = b2 >> 7 & 1 + length_bits = b2 & 0x7f + + self.header = (fin, rsv1, rsv2, rsv3, opcode, has_mask, length_bits) + + def has_mask(self): + if not self.header: + return False + return self.header[frame_buffer._HEADER_MASK_INDEX] + + def has_received_length(self): + return self.length is None + + def recv_length(self): + bits = self.header[frame_buffer._HEADER_LENGTH_INDEX] + length_bits = bits & 0x7f + if length_bits == 0x7e: + v = self.recv_strict(2) + self.length = struct.unpack("!H", v)[0] + elif length_bits == 0x7f: + v = self.recv_strict(8) + self.length = struct.unpack("!Q", v)[0] + else: + self.length = length_bits + + def has_received_mask(self): + return self.mask is None + + def recv_mask(self): + self.mask = self.recv_strict(4) if self.has_mask() else "" + + def recv_frame(self): + + with self.lock: + # Header + if 
self.has_received_header(): + self.recv_header() + (fin, rsv1, rsv2, rsv3, opcode, has_mask, _) = self.header + + # Frame length + if self.has_received_length(): + self.recv_length() + length = self.length + + # Mask + if self.has_received_mask(): + self.recv_mask() + mask = self.mask + + # Payload + payload = self.recv_strict(length) + if has_mask: + payload = ABNF.mask(mask, payload) + + # Reset for next frame + self.clear() + + frame = ABNF(fin, rsv1, rsv2, rsv3, opcode, has_mask, payload) + frame.validate(self.skip_utf8_validation) + + return frame + + def recv_strict(self, bufsize): + shortage = bufsize - sum(len(x) for x in self.recv_buffer) + while shortage > 0: + # Limit buffer size that we pass to socket.recv() to avoid + # fragmenting the heap -- the number of bytes recv() actually + # reads is limited by socket buffer and is relatively small, + # yet passing large numbers repeatedly causes lots of large + # buffers allocated and then shrunk, which results in + # fragmentation. 
+ bytes_ = self.recv(min(16384, shortage)) + self.recv_buffer.append(bytes_) + shortage -= len(bytes_) + + unified = six.b("").join(self.recv_buffer) + + if shortage == 0: + self.recv_buffer = [] + return unified + else: + self.recv_buffer = [unified[bufsize:]] + return unified[:bufsize] + + +class continuous_frame(object): + + def __init__(self, fire_cont_frame, skip_utf8_validation): + self.fire_cont_frame = fire_cont_frame + self.skip_utf8_validation = skip_utf8_validation + self.cont_data = None + self.recving_frames = None + + def validate(self, frame): + if not self.recving_frames and frame.opcode == ABNF.OPCODE_CONT: + raise WebSocketProtocolException("Illegal frame") + if self.recving_frames and \ + frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY): + raise WebSocketProtocolException("Illegal frame") + + def add(self, frame): + if self.cont_data: + self.cont_data[1] += frame.data + else: + if frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY): + self.recving_frames = frame.opcode + self.cont_data = [frame.opcode, frame.data] + + if frame.fin: + self.recving_frames = None + + def is_fire(self, frame): + return frame.fin or self.fire_cont_frame + + def extract(self, frame): + data = self.cont_data + self.cont_data = None + frame.data = data[1] + if not self.fire_cont_frame and data[0] == ABNF.OPCODE_TEXT and not self.skip_utf8_validation and not validate_utf8(frame.data): + raise WebSocketPayloadException( + "cannot decode: " + repr(frame.data)) + + return [data[0], frame] diff --git a/src/lib/websocket/_app.py b/src/lib/websocket/_app.py new file mode 100644 index 000000000..74e90ae02 --- /dev/null +++ b/src/lib/websocket/_app.py @@ -0,0 +1,320 @@ +""" +websocket - WebSocket client library for Python + +Copyright (C) 2010 Hiroki Ohtani(liris) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the 
License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, + Boston, MA 02110-1335 USA + +""" + +""" +WebSocketApp provides higher level APIs. +""" +import select +import sys +import threading +import time +import traceback + +import six + +from ._abnf import ABNF +from ._core import WebSocket, getdefaulttimeout +from ._exceptions import * +from . import _logging + + +__all__ = ["WebSocketApp"] + +class Dispatcher: + def __init__(self, app, ping_timeout): + self.app = app + self.ping_timeout = ping_timeout + + def read(self, sock, callback): + while self.app.sock.connected: + r, w, e = select.select( + (self.app.sock.sock, ), (), (), self.ping_timeout) # Use a 10 second timeout to avoid to wait forever on close + if r: + callback() + +class SSLDispacther: + def __init__(self, app, ping_timeout): + self.app = app + self.ping_timeout = ping_timeout + + def read(self, sock, callback): + while self.app.sock.connected: + r = self.select() + if r: + callback() + + def select(self): + sock = self.app.sock.sock + if sock.pending(): + return [sock,] + + r, w, e = select.select((sock, ), (), (), self.ping_timeout) + return r + +class WebSocketApp(object): + """ + Higher level of APIs are provided. + The interface is like JavaScript WebSocket object. + """ + + def __init__(self, url, header=None, + on_open=None, on_message=None, on_error=None, + on_close=None, on_ping=None, on_pong=None, + on_cont_message=None, + keep_running=True, get_mask_key=None, cookie=None, + subprotocols=None, + on_data=None): + """ + url: websocket url. 
+ header: custom header for websocket handshake. + on_open: callable object which is called at opening websocket. + this function has one argument. The argument is this class object. + on_message: callable object which is called when received data. + on_message has 2 arguments. + The 1st argument is this class object. + The 2nd argument is utf-8 string which we get from the server. + on_error: callable object which is called when we get error. + on_error has 2 arguments. + The 1st argument is this class object. + The 2nd argument is exception object. + on_close: callable object which is called when closed the connection. + this function has one argument. The argument is this class object. + on_cont_message: callback object which is called when receive continued + frame data. + on_cont_message has 3 arguments. + The 1st argument is this class object. + The 2nd argument is utf-8 string which we get from the server. + The 3rd argument is continue flag. if 0, the data continue + to next frame data + on_data: callback object which is called when a message received. + This is called before on_message or on_cont_message, + and then on_message or on_cont_message is called. + on_data has 4 argument. + The 1st argument is this class object. + The 2nd argument is utf-8 string which we get from the server. + The 3rd argument is data type. ABNF.OPCODE_TEXT or ABNF.OPCODE_BINARY will be came. + The 4th argument is continue flag. if 0, the data continue + keep_running: this parameter is obosleted and ignored it. + get_mask_key: a callable to produce new mask keys, + see the WebSocket.set_mask_key's docstring for more information + subprotocols: array of available sub protocols. default is None. 
+ """ + self.url = url + self.header = header if header is not None else [] + self.cookie = cookie + self.on_open = on_open + self.on_message = on_message + self.on_data = on_data + self.on_error = on_error + self.on_close = on_close + self.on_ping = on_ping + self.on_pong = on_pong + self.on_cont_message = on_cont_message + self.keep_running = False + self.get_mask_key = get_mask_key + self.sock = None + self.last_ping_tm = 0 + self.last_pong_tm = 0 + self.subprotocols = subprotocols + + def send(self, data, opcode=ABNF.OPCODE_TEXT): + """ + send message. + data: message to send. If you set opcode to OPCODE_TEXT, + data must be utf-8 string or unicode. + opcode: operation code of data. default is OPCODE_TEXT. + """ + + if not self.sock or self.sock.send(data, opcode) == 0: + raise WebSocketConnectionClosedException( + "Connection is already closed.") + + def close(self, **kwargs): + """ + close websocket connection. + """ + self.keep_running = False + if self.sock: + self.sock.close(**kwargs) + + def _send_ping(self, interval, event): + while not event.wait(interval): + self.last_ping_tm = time.time() + if self.sock: + try: + self.sock.ping() + except Exception as ex: + _logging.warning("send_ping routine terminated: {}".format(ex)) + break + + def run_forever(self, sockopt=None, sslopt=None, + ping_interval=0, ping_timeout=None, + http_proxy_host=None, http_proxy_port=None, + http_no_proxy=None, http_proxy_auth=None, + skip_utf8_validation=False, + host=None, origin=None, dispatcher=None): + """ + run event loop for WebSocket framework. + This loop is infinite loop and is alive during websocket is available. + sockopt: values for socket.setsockopt. + sockopt must be tuple + and each element is argument of sock.setsockopt. + sslopt: ssl socket optional dict. + ping_interval: automatically send "ping" command + every specified period(second) + if set to 0, not send automatically. + ping_timeout: timeout(second) if the pong message is not received. 
+ http_proxy_host: http proxy host name. + http_proxy_port: http proxy port. If not set, set to 80. + http_no_proxy: host names, which doesn't use proxy. + skip_utf8_validation: skip utf8 validation. + host: update host header. + origin: update origin header. + """ + + if not ping_timeout or ping_timeout <= 0: + ping_timeout = None + if ping_timeout and ping_interval and ping_interval <= ping_timeout: + raise WebSocketException("Ensure ping_interval > ping_timeout") + if sockopt is None: + sockopt = [] + if sslopt is None: + sslopt = {} + if self.sock: + raise WebSocketException("socket is already opened") + thread = None + close_frame = None + self.keep_running = True + self.last_ping_tm = 0 + self.last_pong_tm = 0 + + def teardown(): + if not self.keep_running: + return + if thread and thread.isAlive(): + event.set() + thread.join() + self.keep_running = False + self.sock.close() + close_args = self._get_close_args( + close_frame.data if close_frame else None) + self._callback(self.on_close, *close_args) + self.sock = None + + try: + self.sock = WebSocket( + self.get_mask_key, sockopt=sockopt, sslopt=sslopt, + fire_cont_frame=self.on_cont_message and True or False, + skip_utf8_validation=skip_utf8_validation) + self.sock.settimeout(getdefaulttimeout()) + self.sock.connect( + self.url, header=self.header, cookie=self.cookie, + http_proxy_host=http_proxy_host, + http_proxy_port=http_proxy_port, http_no_proxy=http_no_proxy, + http_proxy_auth=http_proxy_auth, subprotocols=self.subprotocols, + host=host, origin=origin) + if not dispatcher: + dispatcher = self.create_dispatcher(ping_timeout) + + self._callback(self.on_open) + + if ping_interval: + event = threading.Event() + thread = threading.Thread( + target=self._send_ping, args=(ping_interval, event)) + thread.setDaemon(True) + thread.start() + + def read(): + if not self.keep_running: + return teardown() + + op_code, frame = self.sock.recv_data_frame(True) + if op_code == ABNF.OPCODE_CLOSE: + close_frame = frame + 
return teardown() + elif op_code == ABNF.OPCODE_PING: + self._callback(self.on_ping, frame.data) + elif op_code == ABNF.OPCODE_PONG: + self.last_pong_tm = time.time() + self._callback(self.on_pong, frame.data) + elif op_code == ABNF.OPCODE_CONT and self.on_cont_message: + self._callback(self.on_data, frame.data, + frame.opcode, frame.fin) + self._callback(self.on_cont_message, + frame.data, frame.fin) + else: + data = frame.data + if six.PY3 and op_code == ABNF.OPCODE_TEXT: + data = data.decode("utf-8") + self._callback(self.on_data, data, frame.opcode, True) + self._callback(self.on_message, data) + + if ping_timeout and self.last_ping_tm \ + and time.time() - self.last_ping_tm > ping_timeout \ + and self.last_ping_tm - self.last_pong_tm > ping_timeout: + raise WebSocketTimeoutException("ping/pong timed out") + return True + + dispatcher.read(self.sock.sock, read) + except (Exception, KeyboardInterrupt, SystemExit) as e: + self._callback(self.on_error, e) + if isinstance(e, SystemExit): + # propagate SystemExit further + raise + teardown() + + def create_dispatcher(self, ping_timeout): + timeout = ping_timeout or 10 + if self.sock.is_ssl(): + return SSLDispacther(self, timeout) + + return Dispatcher(self, timeout) + + def _get_close_args(self, data): + """ this functions extracts the code, reason from the close body + if they exists, and if the self.on_close except three arguments """ + import inspect + # if the on_close callback is "old", just return empty list + if sys.version_info < (3, 0): + if not self.on_close or len(inspect.getargspec(self.on_close).args) != 3: + return [] + else: + if not self.on_close or len(inspect.getfullargspec(self.on_close).args) != 3: + return [] + + if data and len(data) >= 2: + code = 256 * six.byte2int(data[0:1]) + six.byte2int(data[1:2]) + reason = data[2:].decode('utf-8') + return [code, reason] + + return [None, None] + + def _callback(self, callback, *args): + if callback: + try: + callback(self, *args) + except Exception as 
e: + _logging.error("error from callback {}: {}".format(callback, e)) + if _logging.isEnabledForDebug(): + _, _, tb = sys.exc_info() + traceback.print_tb(tb) diff --git a/src/lib/websocket/_cookiejar.py b/src/lib/websocket/_cookiejar.py new file mode 100644 index 000000000..3efeb0fd2 --- /dev/null +++ b/src/lib/websocket/_cookiejar.py @@ -0,0 +1,52 @@ +try: + import Cookie +except: + import http.cookies as Cookie + + +class SimpleCookieJar(object): + def __init__(self): + self.jar = dict() + + def add(self, set_cookie): + if set_cookie: + try: + simpleCookie = Cookie.SimpleCookie(set_cookie) + except: + simpleCookie = Cookie.SimpleCookie(set_cookie.encode('ascii', 'ignore')) + + for k, v in simpleCookie.items(): + domain = v.get("domain") + if domain: + if not domain.startswith("."): + domain = "." + domain + cookie = self.jar.get(domain) if self.jar.get(domain) else Cookie.SimpleCookie() + cookie.update(simpleCookie) + self.jar[domain.lower()] = cookie + + def set(self, set_cookie): + if set_cookie: + try: + simpleCookie = Cookie.SimpleCookie(set_cookie) + except: + simpleCookie = Cookie.SimpleCookie(set_cookie.encode('ascii', 'ignore')) + + for k, v in simpleCookie.items(): + domain = v.get("domain") + if domain: + if not domain.startswith("."): + domain = "." 
+ domain + self.jar[domain.lower()] = simpleCookie + + def get(self, host): + if not host: + return "" + + cookies = [] + for domain, simpleCookie in self.jar.items(): + host = host.lower() + if host.endswith(domain) or host == domain[1:]: + cookies.append(self.jar.get(domain)) + + return "; ".join(filter(None, ["%s=%s" % (k, v.value) for cookie in filter(None, sorted(cookies)) for k, v in + sorted(cookie.items())])) diff --git a/src/lib/websocket/_core.py b/src/lib/websocket/_core.py new file mode 100644 index 000000000..2d009621f --- /dev/null +++ b/src/lib/websocket/_core.py @@ -0,0 +1,495 @@ +""" +websocket - WebSocket client library for Python + +Copyright (C) 2010 Hiroki Ohtani(liris) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, + Boston, MA 02110-1335 USA + +""" +from __future__ import print_function + +import socket +import struct +import threading + +import six + +# websocket modules +from ._abnf import * +from ._exceptions import * +from ._handshake import * +from ._http import * +from ._logging import * +from ._socket import * +from ._ssl_compat import * +from ._utils import * + +__all__ = ['WebSocket', 'create_connection'] + +""" +websocket python client. +========================= + +This version support only hybi-13. +Please see http://tools.ietf.org/html/rfc6455 for protocol. 
+""" + + +class WebSocket(object): + """ + Low level WebSocket interface. + This class is based on + The WebSocket protocol draft-hixie-thewebsocketprotocol-76 + http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76 + + We can connect to the websocket server and send/receive data. + The following example is an echo client. + + >>> import websocket + >>> ws = websocket.WebSocket() + >>> ws.connect("ws://echo.websocket.org") + >>> ws.send("Hello, Server") + >>> ws.recv() + 'Hello, Server' + >>> ws.close() + + get_mask_key: a callable to produce new mask keys, see the set_mask_key + function's docstring for more details + sockopt: values for socket.setsockopt. + sockopt must be tuple and each element is argument of sock.setsockopt. + sslopt: dict object for ssl socket option. + fire_cont_frame: fire recv event for each cont frame. default is False + enable_multithread: if set to True, lock send method. + skip_utf8_validation: skip utf8 validation. + """ + + def __init__(self, get_mask_key=None, sockopt=None, sslopt=None, + fire_cont_frame=False, enable_multithread=False, + skip_utf8_validation=False, **_): + """ + Initialize WebSocket object. + """ + self.sock_opt = sock_opt(sockopt, sslopt) + self.handshake_response = None + self.sock = None + + self.connected = False + self.get_mask_key = get_mask_key + # These buffer over the build-up of a single frame. + self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation) + self.cont_frame = continuous_frame( + fire_cont_frame, skip_utf8_validation) + + if enable_multithread: + self.lock = threading.Lock() + self.readlock = threading.Lock() + else: + self.lock = NoLock() + self.readlock = NoLock() + + def __iter__(self): + """ + Allow iteration over websocket, implying sequential `recv` executions. 
+ """ + while True: + yield self.recv() + + def __next__(self): + return self.recv() + + def next(self): + return self.__next__() + + def fileno(self): + return self.sock.fileno() + + def set_mask_key(self, func): + """ + set function to create musk key. You can customize mask key generator. + Mainly, this is for testing purpose. + + func: callable object. the func takes 1 argument as integer. + The argument means length of mask key. + This func must return string(byte array), + which length is argument specified. + """ + self.get_mask_key = func + + def gettimeout(self): + """ + Get the websocket timeout(second). + """ + return self.sock_opt.timeout + + def settimeout(self, timeout): + """ + Set the timeout to the websocket. + + timeout: timeout time(second). + """ + self.sock_opt.timeout = timeout + if self.sock: + self.sock.settimeout(timeout) + + timeout = property(gettimeout, settimeout) + + def getsubprotocol(self): + """ + get subprotocol + """ + if self.handshake_response: + return self.handshake_response.subprotocol + else: + return None + + subprotocol = property(getsubprotocol) + + def getstatus(self): + """ + get handshake status + """ + if self.handshake_response: + return self.handshake_response.status + else: + return None + + status = property(getstatus) + + def getheaders(self): + """ + get handshake response header + """ + if self.handshake_response: + return self.handshake_response.headers + else: + return None + + def is_ssl(self): + return isinstance(self.sock, ssl.SSLSocket) + + headers = property(getheaders) + + def connect(self, url, **options): + """ + Connect to url. url is websocket url scheme. + ie. ws://host:port/resource + You can customize using 'options'. + If you set "header" list object, you can set your own custom header. + + >>> ws = WebSocket() + >>> ws.connect("ws://echo.websocket.org/", + ... header=["User-Agent: MyProgram", + ... "x-custom: header"]) + + timeout: socket timeout time. This value is integer. 
+ if you set None for this value, + it means "use default_timeout value" + + options: "header" -> custom http header list or dict. + "cookie" -> cookie value. + "origin" -> custom origin url. + "host" -> custom host header string. + "http_proxy_host" - http proxy host name. + "http_proxy_port" - http proxy port. If not set, set to 80. + "http_no_proxy" - host names, which doesn't use proxy. + "http_proxy_auth" - http proxy auth information. + tuple of username and password. + default is None + "subprotocols" - array of available sub protocols. + default is None. + "socket" - pre-initialized stream socket. + + """ + self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options), + options.pop('socket', None)) + + try: + self.handshake_response = handshake(self.sock, *addrs, **options) + self.connected = True + except: + if self.sock: + self.sock.close() + self.sock = None + raise + + def send(self, payload, opcode=ABNF.OPCODE_TEXT): + """ + Send the data as string. + + payload: Payload must be utf-8 string or unicode, + if the opcode is OPCODE_TEXT. + Otherwise, it must be string(byte array) + + opcode: operation code to send. Please see OPCODE_XXX. + """ + + frame = ABNF.create_frame(payload, opcode) + return self.send_frame(frame) + + def send_frame(self, frame): + """ + Send the data frame. 
+ + frame: frame data created by ABNF.create_frame + + >>> ws = create_connection("ws://echo.websocket.org/") + >>> frame = ABNF.create_frame("Hello", ABNF.OPCODE_TEXT) + >>> ws.send_frame(frame) + >>> cont_frame = ABNF.create_frame("My name is ", ABNF.OPCODE_CONT, 0) + >>> ws.send_frame(frame) + >>> cont_frame = ABNF.create_frame("Foo Bar", ABNF.OPCODE_CONT, 1) + >>> ws.send_frame(frame) + + """ + if self.get_mask_key: + frame.get_mask_key = self.get_mask_key + data = frame.format() + length = len(data) + trace("send: " + repr(data)) + + with self.lock: + while data: + l = self._send(data) + data = data[l:] + + return length + + def send_binary(self, payload): + return self.send(payload, ABNF.OPCODE_BINARY) + + def ping(self, payload=""): + """ + send ping data. + + payload: data payload to send server. + """ + if isinstance(payload, six.text_type): + payload = payload.encode("utf-8") + self.send(payload, ABNF.OPCODE_PING) + + def pong(self, payload): + """ + send pong data. + + payload: data payload to send server. + """ + if isinstance(payload, six.text_type): + payload = payload.encode("utf-8") + self.send(payload, ABNF.OPCODE_PONG) + + def recv(self): + """ + Receive string data(byte array) from the server. + + return value: string(byte array) value. + """ + with self.readlock: + opcode, data = self.recv_data() + if six.PY3 and opcode == ABNF.OPCODE_TEXT: + return data.decode("utf-8") + elif opcode == ABNF.OPCODE_TEXT or opcode == ABNF.OPCODE_BINARY: + return data + else: + return '' + + def recv_data(self, control_frame=False): + """ + Receive data with operation code. + + control_frame: a boolean flag indicating whether to return control frame + data, defaults to False + + return value: tuple of operation code and string(byte array) value. + """ + opcode, frame = self.recv_data_frame(control_frame) + return opcode, frame.data + + def recv_data_frame(self, control_frame=False): + """ + Receive data with operation code. 
+ + control_frame: a boolean flag indicating whether to return control frame + data, defaults to False + + return value: tuple of operation code and string(byte array) value. + """ + while True: + frame = self.recv_frame() + if not frame: + # handle error: + # 'NoneType' object has no attribute 'opcode' + raise WebSocketProtocolException( + "Not a valid frame %s" % frame) + elif frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT): + self.cont_frame.validate(frame) + self.cont_frame.add(frame) + + if self.cont_frame.is_fire(frame): + return self.cont_frame.extract(frame) + + elif frame.opcode == ABNF.OPCODE_CLOSE: + self.send_close() + return frame.opcode, frame + elif frame.opcode == ABNF.OPCODE_PING: + if len(frame.data) < 126: + self.pong(frame.data) + else: + raise WebSocketProtocolException( + "Ping message is too long") + if control_frame: + return frame.opcode, frame + elif frame.opcode == ABNF.OPCODE_PONG: + if control_frame: + return frame.opcode, frame + + def recv_frame(self): + """ + receive data as frame from server. + + return value: ABNF frame object. + """ + return self.frame_buffer.recv_frame() + + def send_close(self, status=STATUS_NORMAL, reason=six.b("")): + """ + send close data to the server. + + status: status code to send. see STATUS_XXX. + + reason: the reason to close. This must be string or bytes. + """ + if status < 0 or status >= ABNF.LENGTH_16: + raise ValueError("code is invalid range") + self.connected = False + self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE) + + def close(self, status=STATUS_NORMAL, reason=six.b(""), timeout=3): + """ + Close Websocket object + + status: status code to send. see STATUS_XXX. + + reason: the reason to close. This must be string. + + timeout: timeout until receive a close frame. + If None, it will wait forever until receive a close frame. 
+ """ + if self.connected: + if status < 0 or status >= ABNF.LENGTH_16: + raise ValueError("code is invalid range") + + try: + self.connected = False + self.send(struct.pack('!H', status) + + reason, ABNF.OPCODE_CLOSE) + sock_timeout = self.sock.gettimeout() + self.sock.settimeout(timeout) + try: + frame = self.recv_frame() + if isEnabledForError(): + recv_status = struct.unpack("!H", frame.data[0:2])[0] + if recv_status != STATUS_NORMAL: + error("close status: " + repr(recv_status)) + except: + pass + self.sock.settimeout(sock_timeout) + self.sock.shutdown(socket.SHUT_RDWR) + except: + pass + + self.shutdown() + + def abort(self): + """ + Low-level asynchronous abort, wakes up other threads that are waiting in recv_* + """ + if self.connected: + self.sock.shutdown(socket.SHUT_RDWR) + + def shutdown(self): + """close socket, immediately.""" + if self.sock: + self.sock.close() + self.sock = None + self.connected = False + + def _send(self, data): + return send(self.sock, data) + + def _recv(self, bufsize): + try: + return recv(self.sock, bufsize) + except WebSocketConnectionClosedException: + if self.sock: + self.sock.close() + self.sock = None + self.connected = False + raise + + +def create_connection(url, timeout=None, class_=WebSocket, **options): + """ + connect to url and return websocket object. + + Connect to url and return the WebSocket object. + Passing optional timeout parameter will set the timeout on the socket. + If no timeout is supplied, + the global default timeout setting returned by getdefauttimeout() is used. + You can customize using 'options'. + If you set "header" list object, you can set your own custom header. + + >>> conn = create_connection("ws://echo.websocket.org/", + ... header=["User-Agent: MyProgram", + ... "x-custom: header"]) + + + timeout: socket timeout time. This value is integer. + if you set None for this value, + it means "use default_timeout value" + + class_: class to instantiate when creating the connection. 
It has to implement + settimeout and connect. It's __init__ should be compatible with + WebSocket.__init__, i.e. accept all of it's kwargs. + options: "header" -> custom http header list or dict. + "cookie" -> cookie value. + "origin" -> custom origin url. + "host" -> custom host header string. + "http_proxy_host" - http proxy host name. + "http_proxy_port" - http proxy port. If not set, set to 80. + "http_no_proxy" - host names, which doesn't use proxy. + "http_proxy_auth" - http proxy auth information. + tuple of username and password. + default is None + "enable_multithread" -> enable lock for multithread. + "sockopt" -> socket options + "sslopt" -> ssl option + "subprotocols" - array of available sub protocols. + default is None. + "skip_utf8_validation" - skip utf8 validation. + "socket" - pre-initialized stream socket. + """ + sockopt = options.pop("sockopt", []) + sslopt = options.pop("sslopt", {}) + fire_cont_frame = options.pop("fire_cont_frame", False) + enable_multithread = options.pop("enable_multithread", False) + skip_utf8_validation = options.pop("skip_utf8_validation", False) + websock = class_(sockopt=sockopt, sslopt=sslopt, + fire_cont_frame=fire_cont_frame, + enable_multithread=enable_multithread, + skip_utf8_validation=skip_utf8_validation, **options) + websock.settimeout(timeout if timeout is not None else getdefaulttimeout()) + websock.connect(url, **options) + return websock diff --git a/src/lib/websocket/_exceptions.py b/src/lib/websocket/_exceptions.py new file mode 100644 index 000000000..24c85e0ee --- /dev/null +++ b/src/lib/websocket/_exceptions.py @@ -0,0 +1,87 @@ +""" +websocket - WebSocket client library for Python + +Copyright (C) 2010 Hiroki Ohtani(liris) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. 
+ + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, + Boston, MA 02110-1335 USA + +""" + + +""" +define websocket exceptions +""" + + +class WebSocketException(Exception): + """ + websocket exception class. + """ + pass + + +class WebSocketProtocolException(WebSocketException): + """ + If the websocket protocol is invalid, this exception will be raised. + """ + pass + + +class WebSocketPayloadException(WebSocketException): + """ + If the websocket payload is invalid, this exception will be raised. + """ + pass + + +class WebSocketConnectionClosedException(WebSocketException): + """ + If remote host closed the connection or some network error happened, + this exception will be raised. + """ + pass + + +class WebSocketTimeoutException(WebSocketException): + """ + WebSocketTimeoutException will be raised at socket timeout during read/write data. + """ + pass + + +class WebSocketProxyException(WebSocketException): + """ + WebSocketProxyException will be raised when proxy error occurred. + """ + pass + + +class WebSocketBadStatusException(WebSocketException): + """ + WebSocketBadStatusException will be raised when we get bad handshake status code. + """ + + def __init__(self, message, status_code, status_message=None): + msg = message % (status_code, status_message) if status_message is not None \ + else message % status_code + super(WebSocketBadStatusException, self).__init__(msg) + self.status_code = status_code + +class WebSocketAddressException(WebSocketException): + """ + If the websocket address info cannot be found, this exception will be raised. 
+ """ + pass diff --git a/src/lib/websocket/_handshake.py b/src/lib/websocket/_handshake.py new file mode 100644 index 000000000..3fd5c9eed --- /dev/null +++ b/src/lib/websocket/_handshake.py @@ -0,0 +1,180 @@ +""" +websocket - WebSocket client library for Python + +Copyright (C) 2010 Hiroki Ohtani(liris) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, + Boston, MA 02110-1335 USA + +""" +import hashlib +import hmac +import os + +import six + +from ._cookiejar import SimpleCookieJar +from ._exceptions import * +from ._http import * +from ._logging import * +from ._socket import * + +if six.PY3: + from base64 import encodebytes as base64encode +else: + from base64 import encodestring as base64encode + +__all__ = ["handshake_response", "handshake"] + +if hasattr(hmac, "compare_digest"): + compare_digest = hmac.compare_digest +else: + def compare_digest(s1, s2): + return s1 == s2 + +# websocket supported version. 
+VERSION = 13 + +CookieJar = SimpleCookieJar() + + +class handshake_response(object): + + def __init__(self, status, headers, subprotocol): + self.status = status + self.headers = headers + self.subprotocol = subprotocol + CookieJar.add(headers.get("set-cookie")) + + +def handshake(sock, hostname, port, resource, **options): + headers, key = _get_handshake_headers(resource, hostname, port, options) + + header_str = "\r\n".join(headers) + send(sock, header_str) + dump("request header", header_str) + + status, resp = _get_resp_headers(sock) + success, subproto = _validate(resp, key, options.get("subprotocols")) + if not success: + raise WebSocketException("Invalid WebSocket Header") + + return handshake_response(status, resp, subproto) + +def _pack_hostname(hostname): + # IPv6 address + if ':' in hostname: + return '[' + hostname + ']' + + return hostname + +def _get_handshake_headers(resource, host, port, options): + headers = [ + "GET %s HTTP/1.1" % resource, + "Upgrade: websocket", + "Connection: Upgrade" + ] + if port == 80 or port == 443: + hostport = _pack_hostname(host) + else: + hostport = "%s:%d" % (_pack_hostname(host), port) + + if "host" in options and options["host"] is not None: + headers.append("Host: %s" % options["host"]) + else: + headers.append("Host: %s" % hostport) + + if "origin" in options and options["origin"] is not None: + headers.append("Origin: %s" % options["origin"]) + else: + headers.append("Origin: http://%s" % hostport) + + key = _create_sec_websocket_key() + headers.append("Sec-WebSocket-Key: %s" % key) + headers.append("Sec-WebSocket-Version: %s" % VERSION) + + subprotocols = options.get("subprotocols") + if subprotocols: + headers.append("Sec-WebSocket-Protocol: %s" % ",".join(subprotocols)) + + if "header" in options: + header = options["header"] + if isinstance(header, dict): + header = map(": ".join, header.items()) + headers.extend(header) + + server_cookie = CookieJar.get(host) + client_cookie = options.get("cookie", None) + + 
cookie = "; ".join(filter(None, [server_cookie, client_cookie])) + + if cookie: + headers.append("Cookie: %s" % cookie) + + headers.append("") + headers.append("") + + return headers, key + + +def _get_resp_headers(sock, success_status=101): + status, resp_headers, status_message = read_headers(sock) + if status != success_status: + raise WebSocketBadStatusException("Handshake status %d %s", status, status_message) + return status, resp_headers + +_HEADERS_TO_CHECK = { + "upgrade": "websocket", + "connection": "upgrade", +} + + +def _validate(headers, key, subprotocols): + subproto = None + for k, v in _HEADERS_TO_CHECK.items(): + r = headers.get(k, None) + if not r: + return False, None + r = r.lower() + if v != r: + return False, None + + if subprotocols: + subproto = headers.get("sec-websocket-protocol", None).lower() + if not subproto or subproto not in [s.lower() for s in subprotocols]: + error("Invalid subprotocol: " + str(subprotocols)) + return False, None + + result = headers.get("sec-websocket-accept", None) + if not result: + return False, None + result = result.lower() + + if isinstance(result, six.text_type): + result = result.encode('utf-8') + + value = (key + "258EAFA5-E914-47DA-95CA-C5AB0DC85B11").encode('utf-8') + hashed = base64encode(hashlib.sha1(value).digest()).strip().lower() + success = compare_digest(hashed, result) + + if success: + return True, subproto + else: + return False, None + + +def _create_sec_websocket_key(): + randomness = os.urandom(16) + return base64encode(randomness).decode('utf-8').strip() diff --git a/src/lib/websocket/_http.py b/src/lib/websocket/_http.py new file mode 100644 index 000000000..c3b53f560 --- /dev/null +++ b/src/lib/websocket/_http.py @@ -0,0 +1,256 @@ +""" +websocket - WebSocket client library for Python + +Copyright (C) 2010 Hiroki Ohtani(liris) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free 
Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, + Boston, MA 02110-1335 USA + +""" +import errno +import os +import socket +import sys + +import six + +from ._exceptions import * +from ._logging import * +from ._socket import* +from ._ssl_compat import * +from ._url import * + +if six.PY3: + from base64 import encodebytes as base64encode +else: + from base64 import encodestring as base64encode + +__all__ = ["proxy_info", "connect", "read_headers"] + + +class proxy_info(object): + + def __init__(self, **options): + self.host = options.get("http_proxy_host", None) + if self.host: + self.port = options.get("http_proxy_port", 0) + self.auth = options.get("http_proxy_auth", None) + else: + self.port = 0 + self.auth = None + self.no_proxy = options.get("http_no_proxy", None) + + +def connect(url, options, proxy, socket): + hostname, port, resource, is_secure = parse_url(url) + + if socket: + return socket, (hostname, port, resource) + + addrinfo_list, need_tunnel, auth = _get_addrinfo_list( + hostname, port, is_secure, proxy) + if not addrinfo_list: + raise WebSocketException( + "Host not found.: " + hostname + ":" + str(port)) + + sock = None + try: + sock = _open_socket(addrinfo_list, options.sockopt, options.timeout) + if need_tunnel: + sock = _tunnel(sock, hostname, port, auth) + + if is_secure: + if HAVE_SSL: + sock = _ssl_socket(sock, options.sslopt, hostname) + else: + raise WebSocketException("SSL not available.") + + return sock, (hostname, port, resource) + except: + if sock: + 
sock.close() + raise + + +def _get_addrinfo_list(hostname, port, is_secure, proxy): + phost, pport, pauth = get_proxy_info( + hostname, is_secure, proxy.host, proxy.port, proxy.auth, proxy.no_proxy) + try: + if not phost: + addrinfo_list = socket.getaddrinfo( + hostname, port, 0, 0, socket.SOL_TCP) + return addrinfo_list, False, None + else: + pport = pport and pport or 80 + addrinfo_list = socket.getaddrinfo(phost, pport, 0, 0, socket.SOL_TCP) + return addrinfo_list, True, pauth + except socket.gaierror as e: + raise WebSocketAddressException(e) + + +def _open_socket(addrinfo_list, sockopt, timeout): + err = None + for addrinfo in addrinfo_list: + family, socktype, proto = addrinfo[:3] + sock = socket.socket(family, socktype, proto) + sock.settimeout(timeout) + for opts in DEFAULT_SOCKET_OPTION: + sock.setsockopt(*opts) + for opts in sockopt: + sock.setsockopt(*opts) + + address = addrinfo[4] + try: + sock.connect(address) + except socket.error as error: + error.remote_ip = str(address[0]) + try: + eConnRefused = (errno.ECONNREFUSED, errno.WSAECONNREFUSED) + except: + eConnRefused = (errno.ECONNREFUSED, ) + if error.errno in eConnRefused: + err = error + continue + else: + raise + else: + break + else: + raise err + + return sock + + +def _can_use_sni(): + return six.PY2 and sys.version_info >= (2, 7, 9) or sys.version_info >= (3, 2) + + +def _wrap_sni_socket(sock, sslopt, hostname, check_hostname): + context = ssl.SSLContext(sslopt.get('ssl_version', ssl.PROTOCOL_SSLv23)) + + if sslopt.get('cert_reqs', ssl.CERT_NONE) != ssl.CERT_NONE: + context.load_verify_locations(cafile=sslopt.get('ca_certs', None), capath=sslopt.get('ca_cert_path', None)) + if sslopt.get('certfile', None): + context.load_cert_chain( + sslopt['certfile'], + sslopt.get('keyfile', None), + sslopt.get('password', None), + ) + # see + # https://github.com/liris/websocket-client/commit/b96a2e8fa765753e82eea531adb19716b52ca3ca#commitcomment-10803153 + context.verify_mode = sslopt['cert_reqs'] + if 
HAVE_CONTEXT_CHECK_HOSTNAME: + context.check_hostname = check_hostname + if 'ciphers' in sslopt: + context.set_ciphers(sslopt['ciphers']) + if 'cert_chain' in sslopt: + certfile, keyfile, password = sslopt['cert_chain'] + context.load_cert_chain(certfile, keyfile, password) + if 'ecdh_curve' in sslopt: + context.set_ecdh_curve(sslopt['ecdh_curve']) + + return context.wrap_socket( + sock, + do_handshake_on_connect=sslopt.get('do_handshake_on_connect', True), + suppress_ragged_eofs=sslopt.get('suppress_ragged_eofs', True), + server_hostname=hostname, + ) + + +def _ssl_socket(sock, user_sslopt, hostname): + sslopt = dict(cert_reqs=ssl.CERT_REQUIRED) + sslopt.update(user_sslopt) + + if os.environ.get('WEBSOCKET_CLIENT_CA_BUNDLE'): + certPath = os.environ.get('WEBSOCKET_CLIENT_CA_BUNDLE') + else: + certPath = os.path.join( + os.path.dirname(__file__), "cacert.pem") + if os.path.isfile(certPath) and user_sslopt.get('ca_certs', None) is None \ + and user_sslopt.get('ca_cert', None) is None: + sslopt['ca_certs'] = certPath + elif os.path.isdir(certPath) and user_sslopt.get('ca_cert_path', None) is None: + sslopt['ca_cert_path'] = certPath + + check_hostname = sslopt["cert_reqs"] != ssl.CERT_NONE and sslopt.pop( + 'check_hostname', True) + + if _can_use_sni(): + sock = _wrap_sni_socket(sock, sslopt, hostname, check_hostname) + else: + sslopt.pop('check_hostname', True) + sock = ssl.wrap_socket(sock, **sslopt) + + if not HAVE_CONTEXT_CHECK_HOSTNAME and check_hostname: + match_hostname(sock.getpeercert(), hostname) + + return sock + + +def _tunnel(sock, host, port, auth): + debug("Connecting proxy...") + connect_header = "CONNECT %s:%d HTTP/1.0\r\n" % (host, port) + # TODO: support digest auth. 
+ if auth and auth[0]: + auth_str = auth[0] + if auth[1]: + auth_str += ":" + auth[1] + encoded_str = base64encode(auth_str.encode()).strip().decode() + connect_header += "Proxy-Authorization: Basic %s\r\n" % encoded_str + connect_header += "\r\n" + dump("request header", connect_header) + + send(sock, connect_header) + + try: + status, resp_headers, status_message = read_headers(sock) + except Exception as e: + raise WebSocketProxyException(str(e)) + + if status != 200: + raise WebSocketProxyException( + "failed CONNECT via proxy status: %r" % status) + + return sock + + +def read_headers(sock): + status = None + status_message = None + headers = {} + trace("--- response header ---") + + while True: + line = recv_line(sock) + line = line.decode('utf-8').strip() + if not line: + break + trace(line) + if not status: + + status_info = line.split(" ", 2) + status = int(status_info[1]) + status_message = status_info[2] + else: + kv = line.split(":", 1) + if len(kv) == 2: + key, value = kv + headers[key.lower()] = value.strip() + else: + raise WebSocketException("Invalid header") + + trace("-----------------------") + + return status, headers, status_message diff --git a/src/lib/websocket/_logging.py b/src/lib/websocket/_logging.py new file mode 100644 index 000000000..d406db6a9 --- /dev/null +++ b/src/lib/websocket/_logging.py @@ -0,0 +1,74 @@ +""" +websocket - WebSocket client library for Python + +Copyright (C) 2010 Hiroki Ohtani(liris) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. 
+ + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, + Boston, MA 02110-1335 USA + +""" +import logging + +_logger = logging.getLogger('websocket') +_traceEnabled = False + +__all__ = ["enableTrace", "dump", "error", "warning", "debug", "trace", + "isEnabledForError", "isEnabledForDebug"] + + +def enableTrace(traceable): + """ + turn on/off the traceability. + + traceable: boolean value. if set True, traceability is enabled. + """ + global _traceEnabled + _traceEnabled = traceable + if traceable: + if not _logger.handlers: + _logger.addHandler(logging.StreamHandler()) + _logger.setLevel(logging.DEBUG) + + +def dump(title, message): + if _traceEnabled: + _logger.debug("--- " + title + " ---") + _logger.debug(message) + _logger.debug("-----------------------") + + +def error(msg): + _logger.error(msg) + + +def warning(msg): + _logger.warning(msg) + + +def debug(msg): + _logger.debug(msg) + + +def trace(msg): + if _traceEnabled: + _logger.debug(msg) + + +def isEnabledForError(): + return _logger.isEnabledFor(logging.ERROR) + + +def isEnabledForDebug(): + return _logger.isEnabledFor(logging.DEBUG) diff --git a/src/lib/websocket/_socket.py b/src/lib/websocket/_socket.py new file mode 100644 index 000000000..c84fcf90a --- /dev/null +++ b/src/lib/websocket/_socket.py @@ -0,0 +1,126 @@ +""" +websocket - WebSocket client library for Python + +Copyright (C) 2010 Hiroki Ohtani(liris) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, + Boston, MA 02110-1335 USA + +""" +import socket + +import six +import sys + +from ._exceptions import * +from ._ssl_compat import * +from ._utils import * + +DEFAULT_SOCKET_OPTION = [(socket.SOL_TCP, socket.TCP_NODELAY, 1)] +if hasattr(socket, "SO_KEEPALIVE"): + DEFAULT_SOCKET_OPTION.append((socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)) +if hasattr(socket, "TCP_KEEPIDLE"): + DEFAULT_SOCKET_OPTION.append((socket.SOL_TCP, socket.TCP_KEEPIDLE, 30)) +if hasattr(socket, "TCP_KEEPINTVL"): + DEFAULT_SOCKET_OPTION.append((socket.SOL_TCP, socket.TCP_KEEPINTVL, 10)) +if hasattr(socket, "TCP_KEEPCNT"): + DEFAULT_SOCKET_OPTION.append((socket.SOL_TCP, socket.TCP_KEEPCNT, 3)) + +_default_timeout = None + +__all__ = ["DEFAULT_SOCKET_OPTION", "sock_opt", "setdefaulttimeout", "getdefaulttimeout", + "recv", "recv_line", "send"] + + +class sock_opt(object): + + def __init__(self, sockopt, sslopt): + if sockopt is None: + sockopt = [] + if sslopt is None: + sslopt = {} + self.sockopt = sockopt + self.sslopt = sslopt + self.timeout = None + + +def setdefaulttimeout(timeout): + """ + Set the global timeout setting to connect. + + timeout: default socket timeout time. This value is second. + """ + global _default_timeout + _default_timeout = timeout + + +def getdefaulttimeout(): + """ + Return the global timeout setting(second) to connect. 
+ """ + return _default_timeout + + +def recv(sock, bufsize): + if not sock: + raise WebSocketConnectionClosedException("socket is already closed.") + + try: + bytes_ = sock.recv(bufsize) + except socket.timeout as e: + message = extract_err_message(e) + raise WebSocketTimeoutException(message) + except SSLError as e: + message = extract_err_message(e) + if isinstance(message, str) and 'timed out' in message: + raise WebSocketTimeoutException(message) + else: + raise + + if not bytes_: + raise WebSocketConnectionClosedException( + "Connection is already closed.") + + return bytes_ + + +def recv_line(sock): + line = [] + while True: + c = recv(sock, 1) + line.append(c) + if c == six.b("\n"): + break + return six.b("").join(line) + + +def send(sock, data): + if isinstance(data, six.text_type): + data = data.encode('utf-8') + + if not sock: + raise WebSocketConnectionClosedException("socket is already closed.") + + try: + return sock.send(data) + except socket.timeout as e: + message = extract_err_message(e) + raise WebSocketTimeoutException(message) + except Exception as e: + message = extract_err_message(e) + if isinstance(message, str) and "timed out" in message: + raise WebSocketTimeoutException(message) + else: + raise diff --git a/src/lib/websocket/_ssl_compat.py b/src/lib/websocket/_ssl_compat.py new file mode 100644 index 000000000..030481628 --- /dev/null +++ b/src/lib/websocket/_ssl_compat.py @@ -0,0 +1,44 @@ +""" +websocket - WebSocket client library for Python + +Copyright (C) 2010 Hiroki Ohtani(liris) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, + Boston, MA 02110-1335 USA + +""" +__all__ = ["HAVE_SSL", "ssl", "SSLError"] + +try: + import ssl + from ssl import SSLError + if hasattr(ssl, 'SSLContext') and hasattr(ssl.SSLContext, 'check_hostname'): + HAVE_CONTEXT_CHECK_HOSTNAME = True + else: + HAVE_CONTEXT_CHECK_HOSTNAME = False + if hasattr(ssl, "match_hostname"): + from ssl import match_hostname + else: + from backports.ssl_match_hostname import match_hostname + __all__.append("match_hostname") + __all__.append("HAVE_CONTEXT_CHECK_HOSTNAME") + + HAVE_SSL = True +except ImportError: + # dummy class of SSLError for ssl none-support environment. + class SSLError(Exception): + pass + + HAVE_SSL = False diff --git a/src/lib/websocket/_url.py b/src/lib/websocket/_url.py new file mode 100644 index 000000000..f7bdf3467 --- /dev/null +++ b/src/lib/websocket/_url.py @@ -0,0 +1,160 @@ +""" +websocket - WebSocket client library for Python + +Copyright (C) 2010 Hiroki Ohtani(liris) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. 
+ + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, + Boston, MA 02110-1335 USA + +""" + +import os +import socket +import struct + +from six.moves.urllib.parse import urlparse + + +__all__ = ["parse_url", "get_proxy_info"] + + +def parse_url(url): + """ + parse url and the result is tuple of + (hostname, port, resource path and the flag of secure mode) + + url: url string. + """ + if ":" not in url: + raise ValueError("url is invalid") + + scheme, url = url.split(":", 1) + + parsed = urlparse(url, scheme="ws") + if parsed.hostname: + hostname = parsed.hostname + else: + raise ValueError("hostname is invalid") + port = 0 + if parsed.port: + port = parsed.port + + is_secure = False + if scheme == "ws": + if not port: + port = 80 + elif scheme == "wss": + is_secure = True + if not port: + port = 443 + else: + raise ValueError("scheme %s is invalid" % scheme) + + if parsed.path: + resource = parsed.path + else: + resource = "/" + + if parsed.query: + resource += "?" 
+ parsed.query + + return hostname, port, resource, is_secure + + +DEFAULT_NO_PROXY_HOST = ["localhost", "127.0.0.1"] + + +def _is_ip_address(addr): + try: + socket.inet_aton(addr) + except socket.error: + return False + else: + return True + + +def _is_subnet_address(hostname): + try: + addr, netmask = hostname.split("/") + return _is_ip_address(addr) and 0 <= int(netmask) < 32 + except ValueError: + return False + + +def _is_address_in_network(ip, net): + ipaddr = struct.unpack('I', socket.inet_aton(ip))[0] + netaddr, bits = net.split('/') + netmask = struct.unpack('I', socket.inet_aton(netaddr))[0] & ((2 << int(bits) - 1) - 1) + return ipaddr & netmask == netmask + + +def _is_no_proxy_host(hostname, no_proxy): + if not no_proxy: + v = os.environ.get("no_proxy", "").replace(" ", "") + no_proxy = v.split(",") + if not no_proxy: + no_proxy = DEFAULT_NO_PROXY_HOST + + if hostname in no_proxy: + return True + elif _is_ip_address(hostname): + return any([_is_address_in_network(hostname, subnet) for subnet in no_proxy if _is_subnet_address(subnet)]) + + return False + + +def get_proxy_info( + hostname, is_secure, proxy_host=None, proxy_port=0, proxy_auth=None, + no_proxy=None): + """ + try to retrieve proxy host and port from environment + if not provided in options. + result is (proxy_host, proxy_port, proxy_auth). + proxy_auth is tuple of username and password + of proxy authentication information. + + hostname: websocket server name. + + is_secure: is the connection secure? (wss) + looks for "https_proxy" in env + before falling back to "http_proxy" + + options: "http_proxy_host" - http proxy host name. + "http_proxy_port" - http proxy port. + "http_no_proxy" - host names, which doesn't use proxy. + "http_proxy_auth" - http proxy auth information. + tuple of username and password. 
+ default is None + """ + if _is_no_proxy_host(hostname, no_proxy): + return None, 0, None + + if proxy_host: + port = proxy_port + auth = proxy_auth + return proxy_host, port, auth + + env_keys = ["http_proxy"] + if is_secure: + env_keys.insert(0, "https_proxy") + + for key in env_keys: + value = os.environ.get(key, None) + if value: + proxy = urlparse(value) + auth = (proxy.username, proxy.password) if proxy.username else None + return proxy.hostname, proxy.port, auth + + return None, 0, None diff --git a/src/lib/websocket/_utils.py b/src/lib/websocket/_utils.py new file mode 100644 index 000000000..399fb89d9 --- /dev/null +++ b/src/lib/websocket/_utils.py @@ -0,0 +1,105 @@ +""" +websocket - WebSocket client library for Python + +Copyright (C) 2010 Hiroki Ohtani(liris) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, + Boston, MA 02110-1335 USA + +""" +import six + +__all__ = ["NoLock", "validate_utf8", "extract_err_message"] + + +class NoLock(object): + + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_value, traceback): + pass + +try: + # If wsaccel is available we use compiled routines to validate UTF-8 + # strings. 
+ from wsaccel.utf8validator import Utf8Validator + + def _validate_utf8(utfbytes): + return Utf8Validator().validate(utfbytes)[0] + +except ImportError: + # UTF-8 validator + # python implementation of http://bjoern.hoehrmann.de/utf-8/decoder/dfa/ + + _UTF8_ACCEPT = 0 + _UTF8_REJECT = 12 + + _UTF8D = [ + # The first part of the table maps bytes to character classes that + # to reduce the size of the transition table and create bitmasks. + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1, 9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, + 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, + 8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, + 10,3,3,3,3,3,3,3,3,3,3,3,3,4,3,3, 11,6,6,6,5,8,8,8,8,8,8,8,8,8,8,8, + + # The second part is a transition table that maps a combination + # of a state of the automaton and a character class to a state. + 0,12,24,36,60,96,84,12,12,12,48,72, 12,12,12,12,12,12,12,12,12,12,12,12, + 12, 0,12,12,12,12,12, 0,12, 0,12,12, 12,24,12,12,12,12,12,24,12,24,12,12, + 12,12,12,12,12,12,12,24,12,12,12,12, 12,24,12,12,12,12,12,12,12,24,12,12, + 12,12,12,12,12,12,12,36,12,36,12,12, 12,36,12,12,12,12,12,36,12,36,12,12, + 12,36,12,12,12,12,12,12,12,12,12,12, ] + + def _decode(state, codep, ch): + tp = _UTF8D[ch] + + codep = (ch & 0x3f) | (codep << 6) if ( + state != _UTF8_ACCEPT) else (0xff >> tp) & ch + state = _UTF8D[256 + state + tp] + + return state, codep + + def _validate_utf8(utfbytes): + state = _UTF8_ACCEPT + codep = 0 + for i in utfbytes: + if six.PY2: + i = ord(i) + state, codep = _decode(state, codep, i) + if state == _UTF8_REJECT: + return False + + return True + + +def validate_utf8(utfbytes): + """ + validate utf8 byte string. + utfbytes: utf byte string to check. 
+ return value: if valid utf8 string, return true. Otherwise, return false. + """ + return _validate_utf8(utfbytes) + + +def extract_err_message(exception): + if exception.args: + return exception.args[0] + else: + return None diff --git a/src/lib/win_inet_pton/__init__.py b/src/lib/win_inet_pton/__init__.py new file mode 100644 index 000000000..12aaf46c4 --- /dev/null +++ b/src/lib/win_inet_pton/__init__.py @@ -0,0 +1,84 @@ +# This software released into the public domain. Anyone is free to copy, +# modify, publish, use, compile, sell, or distribute this software, +# either in source code form or as a compiled binary, for any purpose, +# commercial or non-commercial, and by any means. + +import socket +import ctypes +import os + + +class sockaddr(ctypes.Structure): + _fields_ = [("sa_family", ctypes.c_short), + ("__pad1", ctypes.c_ushort), + ("ipv4_addr", ctypes.c_byte * 4), + ("ipv6_addr", ctypes.c_byte * 16), + ("__pad2", ctypes.c_ulong)] + +if hasattr(ctypes, 'windll'): + WSAStringToAddressA = ctypes.windll.ws2_32.WSAStringToAddressA + WSAAddressToStringA = ctypes.windll.ws2_32.WSAAddressToStringA +else: + def not_windows(): + raise SystemError( + "Invalid platform. ctypes.windll must be available." 
+ ) + WSAStringToAddressA = not_windows + WSAAddressToStringA = not_windows + + +def inet_pton(address_family, ip_string): + addr = sockaddr() + addr.sa_family = address_family + addr_size = ctypes.c_int(ctypes.sizeof(addr)) + + if WSAStringToAddressA( + ip_string, + address_family, + None, + ctypes.byref(addr), + ctypes.byref(addr_size) + ) != 0: + raise socket.error(ctypes.FormatError()) + + if address_family == socket.AF_INET: + return ctypes.string_at(addr.ipv4_addr, 4) + if address_family == socket.AF_INET6: + return ctypes.string_at(addr.ipv6_addr, 16) + + raise socket.error('unknown address family') + + +def inet_ntop(address_family, packed_ip): + addr = sockaddr() + addr.sa_family = address_family + addr_size = ctypes.c_int(ctypes.sizeof(addr)) + ip_string = ctypes.create_string_buffer(128) + ip_string_size = ctypes.c_int(ctypes.sizeof(ip_string)) + + if address_family == socket.AF_INET: + if len(packed_ip) != ctypes.sizeof(addr.ipv4_addr): + raise socket.error('packed IP wrong length for inet_ntoa') + ctypes.memmove(addr.ipv4_addr, packed_ip, 4) + elif address_family == socket.AF_INET6: + if len(packed_ip) != ctypes.sizeof(addr.ipv6_addr): + raise socket.error('packed IP wrong length for inet_ntoa') + ctypes.memmove(addr.ipv6_addr, packed_ip, 16) + else: + raise socket.error('unknown address family') + + if WSAAddressToStringA( + ctypes.byref(addr), + addr_size, + None, + ip_string, + ctypes.byref(ip_string_size) + ) != 0: + raise socket.error(ctypes.FormatError()) + + return ip_string[:ip_string_size.value - 1] + +# Adding our two functions to the socket library +if os.name == 'nt': + socket.inet_pton = inet_pton + socket.inet_ntop = inet_ntop diff --git a/src/main.py b/src/main.py index a94b6e0e8..0a05159f3 100644 --- a/src/main.py +++ b/src/main.py @@ -20,6 +20,7 @@ # Not thread: pyfilesystem and systray icon, Not subprocess: Gevent 1.1+ update_after_shutdown = False # If set True then update and restart zeronet after main loop ended 
+restart_after_shutdown = False # If set True then restart zeronet after main loop ended # Load config from Config import config @@ -27,68 +28,43 @@ if not config.arguments: # Config parse failed, show the help screen and exit config.parse() -# Create necessary files and dirs -if not os.path.isdir(config.log_dir): - os.mkdir(config.log_dir) +config.initLogging() + if not os.path.isdir(config.data_dir): os.mkdir(config.data_dir) + try: + os.chmod(config.data_dir, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR) + except Exception as err: + print "Can't change permission of %s: %s" % (config.data_dir, err) + if not os.path.isfile("%s/sites.json" % config.data_dir): open("%s/sites.json" % config.data_dir, "w").write("{}") - os.chmod("%s/sites.json" % config.data_dir, stat.S_IRUSR | stat.S_IWUSR) if not os.path.isfile("%s/users.json" % config.data_dir): open("%s/users.json" % config.data_dir, "w").write("{}") - os.chmod("%s/users.json" % config.data_dir, stat.S_IRUSR | stat.S_IWUSR) -# Setup logging if config.action == "main": from util import helper - log_file_path = "%s/debug.log" % config.log_dir try: lock = helper.openLocked("%s/lock.pid" % config.data_dir, "w") lock.write("%s" % os.getpid()) except IOError as err: print "Can't open lock file, your ZeroNet client is probably already running, exiting... 
(%s)" % err - if config.open_browser: + if config.open_browser and config.open_browser != "False": print "Opening browser: %s...", config.open_browser import webbrowser - if config.open_browser == "default_browser": - browser = webbrowser.get() - else: - browser = webbrowser.get(config.open_browser) - browser.open("http://%s:%s/%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage), new=2) + try: + if config.open_browser == "default_browser": + browser = webbrowser.get() + else: + browser = webbrowser.get(config.open_browser) + browser.open("http://%s:%s/%s" % (config.ui_ip if config.ui_ip != "*" else "127.0.0.1", config.ui_port, config.homepage), new=2) + except Exception as err: + print "Error starting browser: %s" % err sys.exit() - if os.path.isfile("%s/debug.log" % config.log_dir): # Simple logrotate - if os.path.isfile("%s/debug-last.log" % config.log_dir): - os.unlink("%s/debug-last.log" % config.log_dir) - os.rename("%s/debug.log" % config.log_dir, "%s/debug-last.log" % config.log_dir) - logging.basicConfig( - format='[%(asctime)s] %(levelname)-8s %(name)s %(message)s', - level=logging.DEBUG, stream=open(log_file_path, "a") - ) -else: - log_file_path = "%s/cmd.log" % config.log_dir - logging.basicConfig( - format='[%(asctime)s] %(levelname)-8s %(name)s %(message)s', - level=logging.DEBUG, stream=open(log_file_path, "w") - ) - -# Console logger -console_log = logging.StreamHandler() -if config.action == "main": # Add time if main action - console_log.setFormatter(logging.Formatter('[%(asctime)s] %(name)s %(message)s', "%H:%M:%S")) -else: - console_log.setFormatter(logging.Formatter('%(name)s %(message)s', "%H:%M:%S")) - -logging.getLogger('').addHandler(console_log) # Add console logger -logging.getLogger('').name = "-" # Remove root prefix # Debug dependent configuration from Debug import DebugHook -if config.debug: - console_log.setLevel(logging.DEBUG) # Display everything to console -else: - 
console_log.setLevel(logging.INFO) # Display only important info to console # Load plugins from Plugin import PluginManager @@ -99,24 +75,38 @@ # Log current config logging.debug("Config: %s" % config) +# Modify stack size on special hardwares +if config.stack_size: + import threading + threading.stack_size(config.stack_size) + # Use pure-python implementation of msgpack to save CPU if config.msgpack_purepython: os.environ["MSGPACK_PUREPYTHON"] = "True" -# Socks Proxy monkey patch +# Socket monkey patch if config.proxy: from util import SocksProxy import urllib2 logging.info("Patching sockets to socks proxy: %s" % config.proxy) - config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost + if config.fileserver_ip == "*": + config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost SocksProxy.monkeyPatch(*config.proxy.split(":")) elif config.tor == "always": from util import SocksProxy import urllib2 logging.info("Patching sockets to tor socks proxy: %s" % config.tor_proxy) - config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost + if config.fileserver_ip == "*": + config.fileserver_ip = '127.0.0.1' # Do not accept connections anywhere but localhost SocksProxy.monkeyPatch(*config.tor_proxy.split(":")) config.disable_udp = True +elif config.bind: + bind = config.bind + if ":" not in config.bind: + bind += ":0" + from util import helper + helper.socketBindMonkeyPatch(*bind.split(":")) + # -- Actions -- @@ -126,7 +116,9 @@ def call(self, function_name, kwargs): logging.info("Version: %s r%s, Python %s, Gevent: %s" % (config.version, config.rev, sys.version, gevent.__version__)) func = getattr(self, function_name, None) - func(**kwargs) + back = func(**kwargs) + if back: + print back # Default action: Start serving UiServer and FileServer def main(self): @@ -137,6 +129,7 @@ def main(self): file_server = FileServer() logging.info("Creating UiServer....") ui_server = UiServer() + 
file_server.ui_server = ui_server logging.info("Removing old SSL certs...") from Crypt import CryptConnection @@ -144,6 +137,7 @@ def main(self): logging.info("Starting servers....") gevent.joinall([gevent.spawn(ui_server.start), gevent.spawn(file_server.start)]) + logging.info("All server stopped") # Site commands @@ -166,6 +160,9 @@ def siteCreate(self): logging.info("Creating directory structure...") from Site import Site + from Site import SiteManager + SiteManager.site_manager.load() + os.mkdir("%s/%s" % (config.data_dir, address)) open("%s/%s/index.html" % (config.data_dir, address), "w").write("Hello %s!" % address) @@ -177,8 +174,11 @@ def siteCreate(self): logging.info("Site created!") - def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False): + def siteSign(self, address, privatekey=None, inner_path="content.json", publish=False, remove_missing_optional=False): from Site import Site + from Site import SiteManager + from Debug import Debug + SiteManager.site_manager.load() logging.info("Signing site: %s..." 
% address) site = Site(address, allow_create=False) @@ -194,14 +194,20 @@ def siteSign(self, address, privatekey=None, inner_path="content.json", publish= # Not found in users.json, ask from console import getpass privatekey = getpass.getpass("Private key (input hidden):") - diffs = site.content_manager.getDiffs(inner_path) - succ = site.content_manager.sign(inner_path=inner_path, privatekey=privatekey, update_changed_files=True) + try: + succ = site.content_manager.sign(inner_path=inner_path, privatekey=privatekey, update_changed_files=True, remove_missing_optional=remove_missing_optional) + except Exception, err: + logging.error("Sign error: %s" % Debug.formatException(err)) + succ = False if succ and publish: - self.sitePublish(address, inner_path=inner_path, diffs=diffs) + self.sitePublish(address, inner_path=inner_path) def siteVerify(self, address): import time from Site import Site + from Site import SiteManager + SiteManager.site_manager.load() + s = time.time() logging.info("Verifing site: %s..." % address) site = Site(address) @@ -210,18 +216,22 @@ def siteVerify(self, address): for content_inner_path in site.content_manager.contents: s = time.time() logging.info("Verifing %s signature..." % content_inner_path) - file_correct = site.content_manager.verifyFile( - content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False - ) + try: + file_correct = site.content_manager.verifyFile( + content_inner_path, site.storage.open(content_inner_path, "rb"), ignore_same=False + ) + except Exception, err: + file_correct = False + if file_correct is True: logging.info("[OK] %s (Done in %.3fs)" % (content_inner_path, time.time() - s)) else: - logging.error("[ERROR] %s: invalid file!" % content_inner_path) + logging.error("[ERROR] %s: invalid file: %s!" 
% (content_inner_path, err)) raw_input("Continue?") bad_files += content_inner_path logging.info("Verifying site files...") - bad_files += site.storage.verifyFiles() + bad_files += site.storage.verifyFiles()["bad_files"] if not bad_files: logging.info("[OK] All file sha512sum matches! (%.3fs)" % (time.time() - s)) else: @@ -231,6 +241,7 @@ def dbRebuild(self, address): from Site import Site from Site import SiteManager SiteManager.site_manager.load() + logging.info("Rebuilding site sql cache: %s..." % address) site = SiteManager.site_manager.get(address) s = time.time() @@ -239,6 +250,9 @@ def dbRebuild(self, address): def dbQuery(self, address, query): from Site import Site + from Site import SiteManager + SiteManager.site_manager.load() + import json site = Site(address) result = [] @@ -248,6 +262,15 @@ def dbQuery(self, address, query): def siteAnnounce(self, address): from Site.Site import Site + from Site import SiteManager + SiteManager.site_manager.load() + + logging.info("Opening a simple connection server") + global file_server + from File import FileServer + file_server = FileServer("127.0.0.1", 1234) + file_server.start() + logging.info("Announcing site %s to tracker..." % address) site = Site(address) @@ -258,11 +281,14 @@ def siteAnnounce(self, address): def siteDownload(self, address): from Site import Site + from Site import SiteManager + SiteManager.site_manager.load() logging.info("Opening a simple connection server") global file_server - from Connection import ConnectionServer - file_server = ConnectionServer("127.0.0.1", 1234) + from File import FileServer + file_server = FileServer("127.0.0.1", 1234) + file_server_thread = gevent.spawn(file_server.start, check_sites=False) site = Site(address) @@ -279,12 +305,13 @@ def onComplete(evt): print "Downloading..." 
site.downloadContent("content.json", check_modifications=True) - print on_completed.get() print "Downloaded in %.3fs" % (time.time()-s) def siteNeedFile(self, address, inner_path): from Site import Site + from Site import SiteManager + SiteManager.site_manager.load() def checker(): while 1: @@ -295,38 +322,67 @@ def checker(): logging.info("Opening a simple connection server") global file_server - from Connection import ConnectionServer - file_server = ConnectionServer("127.0.0.1", 1234) + from File import FileServer + file_server = FileServer("127.0.0.1", 1234) + file_server_thread = gevent.spawn(file_server.start, check_sites=False) site = Site(address) site.announce() print site.needFile(inner_path, update=True) - def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json", diffs={}): + def siteCmd(self, address, cmd, parameters): + import json + from Site import SiteManager + + site = SiteManager.site_manager.get(address) + + ws = self.getWebsocket(site) + ws.send(json.dumps({"cmd": cmd, "params": parameters, "id": 1})) + res = json.loads(ws.recv()) + if "result" in res: + return res["result"] + else: + return res + + def getWebsocket(self, site): + from lib import websocket + ws = websocket.create_connection("ws://%s:%s/Websocket?wrapper_key=%s" % (config.ui_ip, config.ui_port, site.settings["wrapper_key"])) + return ws + + def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="content.json"): global file_server from Site import Site + from Site import SiteManager from File import FileServer # We need fileserver to handle incoming file requests from Peer import Peer - + file_server = FileServer() + site = SiteManager.site_manager.get(address) logging.info("Loading site...") - site = Site(address, allow_create=False) site.settings["serving"] = True # Serving the site even if its disabled - logging.info("Creating FileServer....") - file_server = FileServer() - site.connection_server = file_server - 
file_server_thread = gevent.spawn(file_server.start, check_sites=False) # Dont check every site integrity - time.sleep(0.001) + try: + ws = self.getWebsocket(site) + logging.info("Sending siteReload") + self.siteCmd(address, "siteReload", inner_path) + + logging.info("Sending sitePublish") + self.siteCmd(address, "sitePublish", {"inner_path": inner_path, "sign": False}) + logging.info("Done.") + + except Exception as err: + logging.info("Can't connect to local websocket client: %s" % err) + logging.info("Creating FileServer....") + file_server_thread = gevent.spawn(file_server.start, check_sites=False) # Dont check every site integrity + time.sleep(0.001) - if not file_server_thread.ready(): # Started fileserver - file_server.openport() + file_server.portCheck() if peer_ip: # Announce ip specificed site.addPeer(peer_ip, peer_port) else: # Just ask the tracker logging.info("Gathering peers from tracker") site.announce() # Gather peers - published = site.publish(5, inner_path, diffs=diffs) # Push to peers + published = site.publish(5, inner_path) # Push to peers if published > 0: time.sleep(3) logging.info("Serving files (max 60s)...") @@ -334,14 +390,6 @@ def sitePublish(self, address, peer_ip=None, peer_port=15441, inner_path="conten logging.info("Done.") else: logging.info("No peers found, sitePublish command only works if you already have visitors serving your site") - else: - # Already running, notify local client on new content - logging.info("Sending siteReload") - my_peer = Peer("127.0.0.1", config.fileserver_port) - logging.info(my_peer.request("siteReload", {"site": site.address, "inner_path": inner_path})) - logging.info("Sending sitePublish") - logging.info(my_peer.request("sitePublish", {"site": site.address, "inner_path": inner_path, "diffs": diffs})) - logging.info("Done.") # Crypto commands def cryptPrivatekeyToAddress(self, privatekey=None): @@ -356,20 +404,41 @@ def cryptSign(self, message, privatekey): from Crypt import CryptBitcoin print 
CryptBitcoin.sign(message, privatekey) + def cryptVerify(self, message, sign, address): + from Crypt import CryptBitcoin + print CryptBitcoin.verify(message, address, sign) + + def cryptGetPrivatekey(self, master_seed, site_address_index=None): + from Crypt import CryptBitcoin + if len(master_seed) != 64: + logging.error("Error: Invalid master seed length: %s (required: 64)" % len(master_seed)) + return False + privatekey = CryptBitcoin.hdPrivatekey(master_seed, site_address_index) + print "Requested private key: %s" % privatekey + # Peer def peerPing(self, peer_ip, peer_port=None): if not peer_port: - peer_port = config.fileserver_port + peer_port = 15441 logging.info("Opening a simple connection server") global file_server from Connection import ConnectionServer file_server = ConnectionServer("127.0.0.1", 1234) + file_server.start(check_connections=False) from Crypt import CryptConnection CryptConnection.manager.loadCerts() from Peer import Peer logging.info("Pinging 5 times peer: %s:%s..." 
% (peer_ip, int(peer_port))) + s = time.time() peer = Peer(peer_ip, peer_port) + peer.connect() + + if not peer.connection: + print "Error: Can't connect to peer (connection error: %s)" % peer.connection_error + return False + print "Connection time: %.3fs (connection error: %s)" % (time.time() - s, peer.connection_error) + for i in range(5): print "Response time: %.3fs (crypt: %s)" % (peer.ping(), peer.connection.crypt) time.sleep(1) @@ -385,6 +454,7 @@ def peerGetFile(self, peer_ip, peer_port, site, filename, benchmark=False): global file_server from Connection import ConnectionServer file_server = ConnectionServer("127.0.0.1", 1234) + file_server.start(check_connections=False) from Crypt import CryptConnection CryptConnection.manager.loadCerts() @@ -405,6 +475,7 @@ def peerCmd(self, peer_ip, peer_port, cmd, parameters): global file_server from Connection import ConnectionServer file_server = ConnectionServer() + file_server.start(check_connections=False) from Crypt import CryptConnection CryptConnection.manager.loadCerts() @@ -416,7 +487,15 @@ def peerCmd(self, peer_ip, peer_port, cmd, parameters): parameters = json.loads(parameters.replace("'", '"')) else: parameters = {} - logging.info("Response: %s" % peer.request(cmd, parameters)) + try: + res = peer.request(cmd, parameters) + print json.dumps(res, indent=2, ensure_ascii=False) + except Exception, err: + print "Unknown response (%s): %s" % (err, res) + + def getConfig(self): + import json + print json.dumps(config.getServerInfo(), indent=2, ensure_ascii=False) actions = Actions() diff --git a/src/util/Noparallel.py b/src/util/Noparallel.py index dc4b565eb..9f9d3132e 100644 --- a/src/util/Noparallel.py +++ b/src/util/Noparallel.py @@ -4,22 +4,34 @@ class Noparallel(object): # Only allow function running once in same time - def __init__(self, blocking=True, ignore_args=False): + def __init__(self, blocking=True, ignore_args=False, ignore_class=False, queue=False): self.threads = {} self.blocking = blocking # 
Blocking: Acts like normal function else thread returned + self.queue = queue + self.queued = False self.ignore_args = ignore_args + self.ignore_class = ignore_class def __call__(self, func): def wrapper(*args, **kwargs): - if self.ignore_args: - key = func # Unique key only by function + if self.ignore_class: + key = func # Unique key only by function and class object + elif self.ignore_args: + key = (func, args[0]) # Unique key only by function and class object else: key = (func, tuple(args), str(kwargs)) # Unique key for function including parameters if key in self.threads: # Thread already running (if using blocking mode) + if self.queue: + self.queued = True thread = self.threads[key] if self.blocking: thread.join() # Blocking until its finished - return thread.value # Return the value + if self.queued: + self.queued = False + return wrapper(*args, **kwargs) # Run again after the end + else: + return thread.value # Return the value + else: # No blocking if thread.ready(): # Its finished, create a new thread = gevent.spawn(func, *args, **kwargs) diff --git a/src/util/Platform.py b/src/util/Platform.py index f7b07e513..19477649d 100644 --- a/src/util/Platform.py +++ b/src/util/Platform.py @@ -15,8 +15,8 @@ def setMaxfilesopened(limit): import resource soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE) if soft < limit: - logging.debug("Current RLIMIT_NOFILE: %s, changing to %s..." % (soft, limit)) - resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard)) + logging.debug("Current RLIMIT_NOFILE: %s (max: %s), changing to %s..." 
% (soft, hard, limit)) + resource.setrlimit(resource.RLIMIT_NOFILE, (limit, hard)) return True except Exception, err: diff --git a/src/util/QueryJson.py b/src/util/QueryJson.py index 0eb56633c..6b1f93720 100644 --- a/src/util/QueryJson.py +++ b/src/util/QueryJson.py @@ -13,12 +13,15 @@ def queryFile(file_path, filter_path, filter_key=None, filter_val=None): if not data: return - for row in data: - if filter_val: # Filter by value - if row[filter_key] == filter_val: + if type(data) == list: + for row in data: + if filter_val: # Filter by value + if row[filter_key] == filter_val: + back.append(row) + else: back.append(row) - else: - back.append(row) + else: + back.append({"value": data}) return back diff --git a/src/util/RateLimit.py b/src/util/RateLimit.py index 2f1cf2d96..35b592caf 100644 --- a/src/util/RateLimit.py +++ b/src/util/RateLimit.py @@ -37,7 +37,7 @@ def delayLeft(event, allowed_again=10): def callQueue(event): func, args, kwargs, thread = queue_db[event] log.debug("Calling: %s" % event) - del called_db[event] + called(event) del queue_db[event] return func(*args, **kwargs) @@ -78,8 +78,7 @@ def call(event, allowed_again=10, func=None, *args, **kwargs): called(event, time_left) time.sleep(time_left) back = func(*args, **kwargs) - if event in called_db: - del called_db[event] + called(event) return back diff --git a/src/util/SafeRe.py b/src/util/SafeRe.py new file mode 100644 index 000000000..6018e2d3b --- /dev/null +++ b/src/util/SafeRe.py @@ -0,0 +1,32 @@ +import re + + +class UnsafePatternError(Exception): + pass + +cached_patterns = {} + + +def isSafePattern(pattern): + if len(pattern) > 255: + raise UnsafePatternError("Pattern too long: %s characters in %s" % (len(pattern), pattern)) + + unsafe_pattern_match = re.search(r"[^\.][\*\{\+]", pattern) # Always should be "." 
before "*{+" characters to avoid ReDoS + if unsafe_pattern_match: + raise UnsafePatternError("Potentially unsafe part of the pattern: %s in %s" % (unsafe_pattern_match.group(0), pattern)) + + repetitions = re.findall(r"\.[\*\{\+]", pattern) + if len(repetitions) >= 10: + raise UnsafePatternError("More than 10 repetitions of %s in %s" % (repetitions[0], pattern)) + + return True + + +def match(pattern, *args, **kwargs): + cached_pattern = cached_patterns.get(pattern) + if cached_pattern: + return cached_pattern.match(*args, **kwargs) + else: + if isSafePattern(pattern): + cached_patterns[pattern] = re.compile(pattern) + return cached_patterns[pattern].match(*args, **kwargs) diff --git a/src/util/SocksProxy.py b/src/util/SocksProxy.py index 7a99e2aa2..4c3571349 100644 --- a/src/util/SocksProxy.py +++ b/src/util/SocksProxy.py @@ -1,10 +1,10 @@ import socket from lib.PySocks import socks - +from Config import config def create_connection(address, timeout=None, source_address=None): - if address == "127.0.0.1": + if address in config.ip_local: sock = socket.socket_noproxy(socket.AF_INET, socket.SOCK_STREAM) sock.connect(address) else: diff --git a/src/util/SslPatch.py b/src/util/SslPatch.py index 1daa73542..a1e5f33fa 100644 --- a/src/util/SslPatch.py +++ b/src/util/SslPatch.py @@ -3,31 +3,57 @@ import logging import os +import sys +import ctypes +import ctypes.util from Config import config +def getLibraryPath(): + if sys.platform.startswith("win"): + lib_path = os.path.dirname(os.path.abspath(__file__)) + "/../lib/opensslVerify/libeay32.dll" + elif sys.platform == "cygwin": + lib_path = "/bin/cygcrypto-1.0.0.dll" + elif os.path.isfile("../lib/libcrypto.so"): # ZeroBundle OSX + lib_path = "../lib/libcrypto.so" + elif os.path.isfile("/opt/lib/libcrypto.so.1.0.0"): # For optware and entware + lib_path = "/opt/lib/libcrypto.so.1.0.0" + else: + lib_path = "/usr/local/ssl/lib/libcrypto.so" + + if os.path.isfile(lib_path): + return lib_path + + if "ANDROID_APP_PATH" in 
os.environ: + try: + lib_dir = os.environ["ANDROID_APP_PATH"] + "/../../lib" + return [lib for lib in os.listdir(lib_dir) if "crypto" in lib][0] + except Exception, err: + logging.debug("OpenSSL lib not found in: %s (%s)" % (lib_dir, err)) + + if "LD_LIBRARY_PATH" in os.environ: + lib_dir_paths = os.environ["LD_LIBRARY_PATH"].split(":") + for path in lib_dir_paths: + try: + return [lib for lib in os.listdir(path) if "libcrypto.so.1.0" in lib][0] + except Exception, err: + logging.debug("OpenSSL lib not found in: %s (%s)" % (path, err)) + + return ( + ctypes.util.find_library('ssl.so.1.0') or ctypes.util.find_library('ssl') or + ctypes.util.find_library('crypto') or ctypes.util.find_library('libcrypto') or 'libeay32' + ) + + def openLibrary(): - import ctypes - import ctypes.util - try: - if sys.platform.startswith("win"): - dll_path = "src/lib/opensslVerify/libeay32.dll" - elif sys.platform == "cygwin": - dll_path = "/bin/cygcrypto-1.0.0.dll" - else: - dll_path = "/usr/local/ssl/lib/libcrypto.so" - ssl = ctypes.CDLL(dll_path, ctypes.RTLD_GLOBAL) - assert ssl - except: - dll_path = ctypes.util.find_library('ssl') or ctypes.util.find_library('crypto') or ctypes.util.find_library('libcrypto') - ssl = ctypes.CDLL(dll_path or 'libeay32', ctypes.RTLD_GLOBAL) - return ssl + lib_path = getLibraryPath() or "libeay32" + logging.debug("Opening %s..." 
% lib_path) + ssl_lib = ctypes.CDLL(lib_path, ctypes.RTLD_GLOBAL) + return ssl_lib def disableSSLCompression(): - import ctypes - import ctypes.util try: openssl = openLibrary() openssl.SSL_COMP_get_compression_methods.restype = ctypes.c_void_p @@ -83,7 +109,9 @@ def new_sslwrap( cert_reqs=__ssl__.CERT_NONE, ssl_version=__ssl__.PROTOCOL_SSLv23, ca_certs=None, ciphers=None ): - context = __ssl__.SSLContext(ssl_version) + context = __ssl__.SSLContext(__ssl__.PROTOCOL_SSLv23) + context.options |= __ssl__.OP_NO_SSLv2 + context.options |= __ssl__.OP_NO_SSLv3 context.verify_mode = cert_reqs or __ssl__.CERT_NONE if ca_certs: context.load_verify_locations(ca_certs) @@ -113,9 +141,8 @@ def new_sslwrap( except Exception, err: pass -# Fix PROTOCOL_SSLv3 not defined -if "PROTOCOL_SSLv3" not in dir(__ssl__): - __ssl__.PROTOCOL_SSLv3 = __ssl__.PROTOCOL_SSLv23 - logging.debug("Redirected PROTOCOL_SSLv3 to PROTOCOL_SSLv23.") +# Redirect insecure SSLv2 and v3 +__ssl__.PROTOCOL_SSLv2 = __ssl__.PROTOCOL_SSLv3 = __ssl__.PROTOCOL_SSLv23 + logging.debug("Python SSL version: %s" % __ssl__.OPENSSL_VERSION) diff --git a/src/util/UpnpPunch.py b/src/util/UpnpPunch.py index 1e47d6a3a..7f14198de 100644 --- a/src/util/UpnpPunch.py +++ b/src/util/UpnpPunch.py @@ -4,8 +4,10 @@ import logging from urlparse import urlparse from xml.dom.minidom import parseString +from xml.parsers.expat import ExpatError from gevent import socket +import gevent # Relevant UPnP spec: # http://www.upnp.org/specs/gw/UPnP-gw-WANIPConnection-v1-Service.pdf @@ -45,7 +47,7 @@ def perform_m_search(local_ip): sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - sock.bind((local_ip, 10000)) + sock.bind((local_ip, 0)) sock.sendto(ssdp_request, ('239.255.255.250', 1900)) if local_ip == "127.0.0.1": @@ -82,7 +84,7 @@ def _retrieve_igd_profile(url): Retrieve the device's UPnP profile. 
""" try: - return urllib2.urlopen(url.geturl(), timeout=5).read() + return urllib2.urlopen(url.geturl(), timeout=5).read().decode('utf-8') except socket.error: raise IGDError('IGD profile query timed out') @@ -100,7 +102,11 @@ def _parse_igd_profile(profile_xml): WANIPConnection or WANPPPConnection and return the 'controlURL' and the service xml schema. """ - dom = parseString(profile_xml) + try: + dom = parseString(profile_xml) + except ExpatError as e: + raise IGDError( + 'Unable to parse IGD reply: {0} \n\n\n {1}'.format(profile_xml, e)) service_types = dom.getElementsByTagName('serviceType') for service in service_types: @@ -123,14 +129,17 @@ def _parse_igd_profile(profile_xml): def _get_local_ips(): local_ips = [] - # get local ip using UDP and a broadcast address - s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) - # Not using because gevents getaddrinfo doesn't like that - # using port 1 as per hobbldygoop's comment about port 0 not working on osx: - # https://github.com/sirMackk/ZeroNet/commit/fdcd15cf8df0008a2070647d4d28ffedb503fba2#commitcomment-9863928 - s.connect(('239.255.255.250', 1)) - local_ips.append(s.getsockname()[0]) + try: + # get local ip using UDP and a broadcast address + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) + # Not using because gevents getaddrinfo doesn't like that + # using port 1 as per hobbldygoop's comment about port 0 not working on osx: + # https://github.com/sirMackk/ZeroNet/commit/fdcd15cf8df0008a2070647d4d28ffedb503fba2#commitcomment-9863928 + s.connect(('239.255.255.250', 1)) + local_ips.append(s.getsockname()[0]) + except: + pass # Get ip by using UDP and a normal address (google dns ip) try: @@ -149,7 +158,9 @@ def _get_local_ips(): # Delete duplicates local_ips = list(set(local_ips)) - logging.debug("Found local ips: %s" % local_ips) + # Probably we looking for an ip starting with 192 + 
local_ips = sorted(local_ips, key=lambda a: a.startswith("192"), reverse=True) + return local_ips @@ -268,7 +279,7 @@ def _send_requests(messages, location, upnp_schema, control_path): raise UpnpError('Sending requests using UPnP failed.') -def _orchestrate_soap_request(ip, port, msg_fn, desc=None, protos=['TCP', 'UDP']): +def _orchestrate_soap_request(ip, port, msg_fn, desc=None, protos=("TCP", "UDP")): logging.debug("Trying using local ip: %s" % ip) idg_data = _collect_idg_data(ip) @@ -284,25 +295,44 @@ def _communicate_with_igd(port=15441, desc="UpnpPunch", retries=3, fn=_create_open_message, - protos=["TCP", "UDP"]): + protos=("TCP", "UDP")): """ Manage sending a message generated by 'fn'. """ - # Retry every ip 'retries' times - local_ips = _get_local_ips() * retries + local_ips = _get_local_ips() success = False + def job(local_ip): + for retry in range(retries): + try: + _orchestrate_soap_request(local_ip, port, fn, desc, protos) + return True + except Exception as e: + logging.debug('Upnp request using "{0}" failed: {1}'.format(local_ip, e)) + gevent.sleep(1) + return False + + threads = [] + for local_ip in local_ips: - try: - _orchestrate_soap_request(local_ip, port, fn, desc, protos) + job_thread = gevent.spawn(job, local_ip) + threads.append(job_thread) + gevent.sleep(0.1) + if any([thread.value for thread in threads]): success = True break - except (UpnpError, IGDError) as e: - logging.debug('Upnp request using "{0}" failed: {1}'.format( - local_ip, e)) - success = False - continue + + # Wait another 10sec for competition or any positibe result + for _ in range(10): + all_done = all([thread.value is not None for thread in threads]) + any_succeed = any([thread.value for thread in threads]) + if all_done or any_succeed: + break + gevent.sleep(1) + + if any([thread.value for thread in threads]): + success = True if not success: raise UpnpError( @@ -310,7 +340,7 @@ def _communicate_with_igd(port=15441, port, retries)) -def ask_to_open_port(port=15441, 
desc="UpnpPunch", retries=3, protos=["TCP", "UDP"]): +def ask_to_open_port(port=15441, desc="UpnpPunch", retries=3, protos=("TCP", "UDP")): logging.debug("Trying to open port %d." % port) _communicate_with_igd(port=port, desc=desc, @@ -319,28 +349,27 @@ def ask_to_open_port(port=15441, desc="UpnpPunch", retries=3, protos=["TCP", "UD protos=protos) -def ask_to_close_port(port=15441, desc="UpnpPunch", retries=3, protos=["TCP", "UDP"]): +def ask_to_close_port(port=15441, desc="UpnpPunch", retries=3, protos=("TCP", "UDP")): logging.debug("Trying to close port %d." % port) # retries=1 because multiple successes cause 500 response and failure _communicate_with_igd(port=port, desc=desc, - retries=1, + retries=retries, fn=_create_close_message, protos=protos) - if __name__ == "__main__": from gevent import monkey - monkey.patch_socket() + monkey.patch_all() logging.getLogger().setLevel(logging.DEBUG) import time s = time.time() print "Opening port..." - print ask_to_open_port(15443, "ZeroNet", retries=3, protos=["TCP"]) - print "Done in", time.time()-s + print ask_to_open_port(15443, "ZeroNet", protos=["TCP"]) + print "Done in", time.time() - s print "Closing port..." 
- print ask_to_close_port(15443, "ZeroNet", retries=3, protos=["TCP"]) - print "Done in", time.time()-s + print ask_to_close_port(15443, "ZeroNet", protos=["TCP"]) + print "Done in", time.time() - s diff --git a/src/util/helper.py b/src/util/helper.py index 13439cded..1c9b26c68 100644 --- a/src/util/helper.py +++ b/src/util/helper.py @@ -9,22 +9,25 @@ import base64 import gevent +if "inet_pton" not in dir(socket): + import win_inet_pton + + from Config import config def atomicWrite(dest, content, mode="w"): try: - permissions = stat.S_IMODE(os.lstat(dest).st_mode) - with open(dest + "-new", mode) as f: + with open(dest + "-tmpnew", mode) as f: f.write(content) f.flush() os.fsync(f.fileno()) - if os.path.isfile(dest + "-old"): # Previous incomplete write - os.rename(dest + "-old", dest + "-old-%s" % time.time()) - os.rename(dest, dest + "-old") - os.rename(dest + "-new", dest) - os.chmod(dest, permissions) - os.unlink(dest + "-old") + if os.path.isfile(dest + "-tmpold"): # Previous incomplete write + os.rename(dest + "-tmpold", dest + "-tmpold-%s" % time.time()) + if os.path.isfile(dest): # Rename old file to -tmpold + os.rename(dest, dest + "-tmpold") + os.rename(dest + "-tmpnew", dest) + os.unlink(dest + "-tmpold") # Remove old file return True except Exception, err: from Debug import Debug @@ -32,8 +35,8 @@ def atomicWrite(dest, content, mode="w"): "File %s write failed: %s, reverting..." 
% (dest, Debug.formatException(err)) ) - if os.path.isfile(dest + "-old") and not os.path.isfile(dest): - os.rename(dest + "-old", dest) + if os.path.isfile(dest + "-tmpold") and not os.path.isfile(dest): + os.rename(dest + "-tmpold", dest) return False @@ -54,7 +57,7 @@ def openLocked(path, mode="w"): def getFreeSpace(): free_space = -1 if "statvfs" in dir(os): # Unix - statvfs = os.statvfs(config.data_dir) + statvfs = os.statvfs(config.data_dir.encode("utf8")) free_space = statvfs.f_frsize * statvfs.f_bavail else: # Windows try: @@ -69,6 +72,13 @@ def getFreeSpace(): return free_space +def sqlquote(value): + if type(value) is int: + return str(value) + else: + return "'%s'" % value.replace("'", "''") + + def shellquote(*args): if len(args) == 1: return '"%s"' % args[0].replace('"', "") @@ -77,27 +87,31 @@ def shellquote(*args): def packPeers(peers): - packed_peers = {"ip4": [], "onion": []} + packed_peers = {"ipv4": [], "ipv6": [], "onion": []} for peer in peers: try: - if peer.ip.endswith(".onion"): - packed_peers["onion"].append(peer.packMyAddress()) - else: - packed_peers["ip4"].append(peer.packMyAddress()) + ip_type = getIpType(peer.ip) + packed_peers[ip_type].append(peer.packMyAddress()) except Exception: logging.error("Error packing peer address: %s" % peer) return packed_peers -# ip, port to packed 6byte format +# ip, port to packed 6byte or 18byte format def packAddress(ip, port): - return socket.inet_aton(ip) + struct.pack("H", port) + if ":" in ip: + return socket.inet_pton(socket.AF_INET6, ip) + struct.pack("H", port) + else: + return socket.inet_aton(ip) + struct.pack("H", port) -# From 6byte format to ip, port +# From 6byte or 18byte format to ip, port def unpackAddress(packed): - assert len(packed) == 6, "Invalid length ip4 packed address: %s" % len(packed) - return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0] + if len(packed) == 18: + return socket.inet_ntop(socket.AF_INET6, packed[0:16]), struct.unpack_from("H", packed, 
16)[0] + else: + assert len(packed) == 6, "Invalid length ip4 packed address: %s" % len(packed) + return socket.inet_ntoa(packed[0:4]), struct.unpack_from("H", packed, 4)[0] # onion, port to packed 12byte format @@ -112,10 +126,10 @@ def unpackOnionAddress(packed): # Get dir from file -# Return: data/site/content.json -> data/site +# Return: data/site/content.json -> data/site/ def getDirname(path): if "/" in path: - return path[:path.rfind("/") + 1] + return path[:path.rfind("/") + 1].lstrip("/") else: return "" @@ -126,6 +140,17 @@ def getFilename(path): return path[path.rfind("/") + 1:] +def getFilesize(path): + try: + s = os.stat(path) + except: + return None + if stat.S_ISREG(s.st_mode): # Test if it's file + return s.st_size + else: + return None + + # Convert hash to hashid for hashfield def toHashId(hash): return int(hash[0:4], 16) @@ -181,3 +206,97 @@ def timerCaller(secs, func, *args, **kwargs): def timer(secs, func, *args, **kwargs): gevent.spawn_later(secs, timerCaller, secs, func, *args, **kwargs) + + +def create_connection(address, timeout=None, source_address=None): + if address in config.ip_local: + sock = socket.create_connection_original(address, timeout, source_address) + else: + sock = socket.create_connection_original(address, timeout, socket.bind_addr) + return sock + + +def socketBindMonkeyPatch(bind_ip, bind_port): + import socket + logging.info("Monkey patching socket to bind to: %s:%s" % (bind_ip, bind_port)) + socket.bind_addr = (bind_ip, int(bind_port)) + socket.create_connection_original = socket.create_connection + socket.create_connection = create_connection + + +def limitedGzipFile(*args, **kwargs): + import gzip + + class LimitedGzipFile(gzip.GzipFile): + def read(self, size=-1): + return super(LimitedGzipFile, self).read(1024 * 1024 * 25) + return LimitedGzipFile(*args, **kwargs) + + +def avg(items): + if len(items) > 0: + return sum(items) / len(items) + else: + return 0 + + +def isIp(ip): + if ":" in ip: # IPv6 + try: + 
socket.inet_pton(socket.AF_INET6, ip) + return True + except: + return False + + else: # IPv4 + try: + socket.inet_aton(ip) + return True + except: + return False + + +local_ip_pattern = re.compile(r"^127\.|192\.168\.|10\.|172\.1[6-9]\.|172\.2[0-9]\.|172\.3[0-1]\.|169\.254\.|::1$|fe80") +def isPrivateIp(ip): + return local_ip_pattern.match(ip) + + +def getIpType(ip): + if ip.endswith(".onion"): + return "onion" + elif ":" in ip: + return "ipv6" + else: + return "ipv4" + + +def createSocket(ip, sock_type=socket.SOCK_STREAM): + ip_type = getIpType(ip) + if ip_type == "ipv6": + return socket.socket(socket.AF_INET6, sock_type) + else: + return socket.socket(socket.AF_INET, sock_type) + + +def getInterfaceIps(ip_type="ipv4"): + res = [] + if ip_type == "ipv6": + test_ips = ["ff0e::c", "2606:4700:4700::1111"] + else: + test_ips = ['239.255.255.250', "8.8.8.8"] + + for test_ip in test_ips: + try: + s = createSocket(test_ip, sock_type=socket.SOCK_DGRAM) + s.connect((test_ip, 1)) + res.append(s.getsockname()[0]) + except: + pass + + try: + res += [ip[4][0] for ip in socket.getaddrinfo(socket.gethostname(), 1)] + except: + pass + + res = [re.sub("%.*", "", ip) for ip in res if getIpType(ip) == ip_type and isIp(ip)] + return list(set(res)) diff --git a/start.py b/start.py index 48d4b21c3..fc2bc0765 100644 --- a/start.py +++ b/start.py @@ -9,7 +9,8 @@ def main(): - sys.argv = [sys.argv[0]]+["--open_browser", "default_browser"]+sys.argv[1:] + if "--open_browser" not in sys.argv: + sys.argv = [sys.argv[0]] + ["--open_browser", "default_browser"] + sys.argv[1:] zeronet.main() if __name__ == '__main__': diff --git a/tools/coffee/coffee-script.js b/tools/coffee/coffee-script.js index 06671c21f..7fce39a68 100644 --- a/tools/coffee/coffee-script.js +++ b/tools/coffee/coffee-script.js @@ -1,12 +1,405 @@ /** - * CoffeeScript Compiler v1.10.0 + * CoffeeScript Compiler v1.12.6 * http://coffeescript.org * * Copyright 2011, Jeremy Ashkenas * Released under the MIT License */ 
-(function(root){var CoffeeScript=function(){function require(e){return require[e]}return require["./helpers"]=function(){var e={},t={exports:e};return function(){var t,n,i,r,s,o;e.starts=function(e,t,n){return t===e.substr(n,t.length)},e.ends=function(e,t,n){var i;return i=t.length,t===e.substr(e.length-i-(n||0),i)},e.repeat=s=function(e,t){var n;for(n="";t>0;)1&t&&(n+=e),t>>>=1,e+=e;return n},e.compact=function(e){var t,n,i,r;for(r=[],t=0,i=e.length;i>t;t++)n=e[t],n&&r.push(n);return r},e.count=function(e,t){var n,i;if(n=i=0,!t.length)return 1/0;for(;i=1+e.indexOf(t,i);)n++;return n},e.merge=function(e,t){return n(n({},e),t)},n=e.extend=function(e,t){var n,i;for(n in t)i=t[n],e[n]=i;return e},e.flatten=i=function(e){var t,n,r,s;for(n=[],r=0,s=e.length;s>r;r++)t=e[r],"[object Array]"===Object.prototype.toString.call(t)?n=n.concat(i(t)):n.push(t);return n},e.del=function(e,t){var n;return n=e[t],delete e[t],n},e.some=null!=(r=Array.prototype.some)?r:function(e){var t,n,i;for(n=0,i=this.length;i>n;n++)if(t=this[n],e(t))return!0;return!1},e.invertLiterate=function(e){var t,n,i;return i=!0,n=function(){var n,r,s,o;for(s=e.split("\n"),o=[],n=0,r=s.length;r>n;n++)t=s[n],i&&/^([ ]{4}|[ ]{0,3}\t)/.test(t)?o.push(t):(i=/^\s*$/.test(t))?o.push(t):o.push("# "+t);return o}(),n.join("\n")},t=function(e,t){return t?{first_line:e.first_line,first_column:e.first_column,last_line:t.last_line,last_column:t.last_column}:e},e.addLocationDataFn=function(e,n){return function(i){return"object"==typeof i&&i.updateLocationDataIfMissing&&i.updateLocationDataIfMissing(t(e,n)),i}},e.locationDataToString=function(e){var t;return"2"in e&&"first_line"in e[2]?t=e[2]:"first_line"in e&&(t=e),t?t.first_line+1+":"+(t.first_column+1)+"-"+(t.last_line+1+":"+(t.last_column+1)):"No location data"},e.baseFileName=function(e,t,n){var i,r;return 
null==t&&(t=!1),null==n&&(n=!1),r=n?/\\|\//:/\//,i=e.split(r),e=i[i.length-1],t&&e.indexOf(".")>=0?(i=e.split("."),i.pop(),"coffee"===i[i.length-1]&&i.length>1&&i.pop(),i.join(".")):e},e.isCoffee=function(e){return/\.((lit)?coffee|coffee\.md)$/.test(e)},e.isLiterate=function(e){return/\.(litcoffee|coffee\.md)$/.test(e)},e.throwSyntaxError=function(e,t){var n;throw n=new SyntaxError(e),n.location=t,n.toString=o,n.stack=""+n,n},e.updateSyntaxError=function(e,t,n){return e.toString===o&&(e.code||(e.code=t),e.filename||(e.filename=n),e.stack=""+e),e},o=function(){var e,t,n,i,r,o,a,c,l,h,u,p,d,f,m;return this.code&&this.location?(u=this.location,a=u.first_line,o=u.first_column,l=u.last_line,c=u.last_column,null==l&&(l=a),null==c&&(c=o),r=this.filename||"[stdin]",e=this.code.split("\n")[a],m=o,i=a===l?c+1:e.length,h=e.slice(0,m).replace(/[^\s]/g," ")+s("^",i-m),"undefined"!=typeof process&&null!==process&&(n=(null!=(p=process.stdout)?p.isTTY:void 0)&&!(null!=(d=process.env)?d.NODE_DISABLE_COLORS:void 0)),(null!=(f=this.colorful)?f:n)&&(t=function(e){return""+e+""},e=e.slice(0,m)+t(e.slice(m,i))+e.slice(i),h=t(h)),r+":"+(a+1)+":"+(o+1)+": error: "+this.message+"\n"+e+"\n"+h):Error.prototype.toString.call(this)},e.nameWhitespaceCharacter=function(e){switch(e){case" ":return"space";case"\n":return"newline";case"\r":return"carriage return";case" ":return"tab";default:return e}}}.call(this),t.exports}(),require["./rewriter"]=function(){var e={},t={exports:e};return function(){var t,n,i,r,s,o,a,c,l,h,u,p,d,f,m,g,v,b,y,k=[].indexOf||function(e){for(var t=0,n=this.length;n>t;t++)if(t in this&&this[t]===e)return t;return-1},w=[].slice;for(f=function(e,t,n){var i;return i=[e,t],i.generated=!0,n&&(i.origin=n),i},e.Rewriter=function(){function e(){}return e.prototype.rewrite=function(e){return 
this.tokens=e,this.removeLeadingNewlines(),this.closeOpenCalls(),this.closeOpenIndexes(),this.normalizeLines(),this.tagPostfixConditionals(),this.addImplicitBracesAndParens(),this.addLocationDataToGeneratedTokens(),this.tokens},e.prototype.scanTokens=function(e){var t,n,i;for(i=this.tokens,t=0;n=i[t];)t+=e.call(this,n,t,i);return!0},e.prototype.detectEnd=function(e,t,n){var i,o,a,c,l;for(l=this.tokens,i=0;c=l[e];){if(0===i&&t.call(this,c,e))return n.call(this,c,e);if(!c||0>i)return n.call(this,c,e-1);o=c[0],k.call(s,o)>=0?i+=1:(a=c[0],k.call(r,a)>=0&&(i-=1)),e+=1}return e-1},e.prototype.removeLeadingNewlines=function(){var e,t,n,i,r;for(i=this.tokens,e=t=0,n=i.length;n>t&&(r=i[e][0],"TERMINATOR"===r);e=++t);return e?this.tokens.splice(0,e):void 0},e.prototype.closeOpenCalls=function(){var e,t;return t=function(e,t){var n;return")"===(n=e[0])||"CALL_END"===n||"OUTDENT"===e[0]&&")"===this.tag(t-1)},e=function(e,t){return this.tokens["OUTDENT"===e[0]?t-1:t][0]="CALL_END"},this.scanTokens(function(n,i){return"CALL_START"===n[0]&&this.detectEnd(i+1,t,e),1})},e.prototype.closeOpenIndexes=function(){var e,t;return t=function(e){var t;return"]"===(t=e[0])||"INDEX_END"===t},e=function(e){return e[0]="INDEX_END"},this.scanTokens(function(n,i){return"INDEX_START"===n[0]&&this.detectEnd(i+1,t,e),1})},e.prototype.indexOfTag=function(){var e,t,n,i,r,s,o;for(t=arguments[0],r=arguments.length>=2?w.call(arguments,1):[],e=0,n=i=0,s=r.length;s>=0?s>i:i>s;n=s>=0?++i:--i){for(;"HERECOMMENT"===this.tag(t+n+e);)e+=2;if(null!=r[n]&&("string"==typeof r[n]&&(r[n]=[r[n]]),o=this.tag(t+n+e),0>k.call(r[n],o)))return-1}return t+n+e-1},e.prototype.looksObjectish=function(e){var t,n;return this.indexOfTag(e,"@",null,":")>-1||this.indexOfTag(e,null,":")>-1?!0:(n=this.indexOfTag(e,s),n>-1&&(t=null,this.detectEnd(n+1,function(e){var t;return t=e[0],k.call(r,t)>=0},function(e,n){return t=n}),":"===this.tag(t+1))?!0:!1)},e.prototype.findTagsBackwards=function(e,t){var 
n,i,o,a,c,l,h;for(n=[];e>=0&&(n.length||(a=this.tag(e),0>k.call(t,a)&&(c=this.tag(e),0>k.call(s,c)||this.tokens[e].generated)&&(l=this.tag(e),0>k.call(u,l))));)i=this.tag(e),k.call(r,i)>=0&&n.push(this.tag(e)),o=this.tag(e),k.call(s,o)>=0&&n.length&&n.pop(),e-=1;return h=this.tag(e),k.call(t,h)>=0},e.prototype.addImplicitBracesAndParens=function(){var e,t;return e=[],t=null,this.scanTokens(function(i,h,p){var d,m,g,v,b,y,w,T,C,F,E,N,L,x,S,D,R,A,I,_,O,$,j,M,B,V,P,U;if(U=i[0],E=(N=h>0?p[h-1]:[])[0],C=(p.length-1>h?p[h+1]:[])[0],j=function(){return e[e.length-1]},M=h,g=function(e){return h-M+e},v=function(){var e,t;return null!=(e=j())?null!=(t=e[2])?t.ours:void 0:void 0},b=function(){var e;return v()&&"("===(null!=(e=j())?e[0]:void 0)},w=function(){var e;return v()&&"{"===(null!=(e=j())?e[0]:void 0)},y=function(){var e;return v&&"CONTROL"===(null!=(e=j())?e[0]:void 0)},B=function(t){var n;return n=null!=t?t:h,e.push(["(",n,{ours:!0}]),p.splice(n,0,f("CALL_START","(")),null==t?h+=1:void 0},d=function(){return e.pop(),p.splice(h,0,f("CALL_END",")",["","end of input",i[2]])),h+=1},V=function(t,n){var r,s;return null==n&&(n=!0),r=null!=t?t:h,e.push(["{",r,{sameLine:!0,startsLine:n,ours:!0}]),s=new String("{"),s.generated=!0,p.splice(r,0,f("{",s,i)),null==t?h+=1:void 0},m=function(t){return t=null!=t?t:h,e.pop(),p.splice(t,0,f("}","}",i)),h+=1},b()&&("IF"===U||"TRY"===U||"FINALLY"===U||"CATCH"===U||"CLASS"===U||"SWITCH"===U))return e.push(["CONTROL",h,{ours:!0}]),g(1);if("INDENT"===U&&v()){if("=>"!==E&&"->"!==E&&"["!==E&&"("!==E&&","!==E&&"{"!==E&&"TRY"!==E&&"ELSE"!==E&&"="!==E)for(;b();)d();return y()&&e.pop(),e.push([U,h]),g(1)}if(k.call(s,U)>=0)return e.push([U,h]),g(1);if(k.call(r,U)>=0){for(;v();)b()?d():w()?m():e.pop();t=e.pop()}if((k.call(c,U)>=0&&i.spaced||"?"===U&&h>0&&!p[h-1].spaced)&&(k.call(o,C)>=0||k.call(l,C)>=0&&!(null!=(L=p[h+1])?L.spaced:void 0)&&!(null!=(x=p[h+1])?x.newLine:void 
0)))return"?"===U&&(U=i[0]="FUNC_EXIST"),B(h+1),g(2);if(k.call(c,U)>=0&&this.indexOfTag(h+1,"INDENT")>-1&&this.looksObjectish(h+2)&&!this.findTagsBackwards(h,["CLASS","EXTENDS","IF","CATCH","SWITCH","LEADING_WHEN","FOR","WHILE","UNTIL"]))return B(h+1),e.push(["INDENT",h+2]),g(3);if(":"===U){for(I=function(){var e;switch(!1){case e=this.tag(h-1),0>k.call(r,e):return t[1];case"@"!==this.tag(h-2):return h-2;default:return h-1}}.call(this);"HERECOMMENT"===this.tag(I-2);)I-=2;return this.insideForDeclaration="FOR"===C,P=0===I||(S=this.tag(I-1),k.call(u,S)>=0)||p[I-1].newLine,j()&&(D=j(),$=D[0],O=D[1],("{"===$||"INDENT"===$&&"{"===this.tag(O-1))&&(P||","===this.tag(I-1)||"{"===this.tag(I-1)))?g(1):(V(I,!!P),g(2))}if(w()&&k.call(u,U)>=0&&(j()[2].sameLine=!1),T="OUTDENT"===E||N.newLine,k.call(a,U)>=0||k.call(n,U)>=0&&T)for(;v();)if(R=j(),$=R[0],O=R[1],A=R[2],_=A.sameLine,P=A.startsLine,b()&&","!==E)d();else if(w()&&!this.insideForDeclaration&&_&&"TERMINATOR"!==U&&":"!==E)m();else{if(!w()||"TERMINATOR"!==U||","===E||P&&this.looksObjectish(h+1))break;if("HERECOMMENT"===C)return g(1);m()}if(!(","!==U||this.looksObjectish(h+1)||!w()||this.insideForDeclaration||"TERMINATOR"===C&&this.looksObjectish(h+2)))for(F="OUTDENT"===C?1:0;w();)m(h+F);return g(1)})},e.prototype.addLocationDataToGeneratedTokens=function(){return this.scanTokens(function(e,t,n){var i,r,s,o,a,c;return e[2]?1:e.generated||e.explicit?("{"===e[0]&&(s=null!=(a=n[t+1])?a[2]:void 0)?(r=s.first_line,i=s.first_column):(o=null!=(c=n[t-1])?c[2]:void 0)?(r=o.last_line,i=o.last_column):r=i=0,e[2]={first_line:r,first_column:i,last_line:r,last_column:i},1):1})},e.prototype.normalizeLines=function(){var e,t,r,s,o;return o=r=s=null,t=function(e,t){var r,s,a,c;return";"!==e[1]&&(r=e[0],k.call(p,r)>=0)&&!("TERMINATOR"===e[0]&&(s=this.tag(t+1),k.call(i,s)>=0))&&!("ELSE"===e[0]&&"THEN"!==o)&&!!("CATCH"!==(a=e[0])&&"FINALLY"!==a||"->"!==o&&"=>"!==o)||(c=e[0],k.call(n,c)>=0&&this.tokens[t-1].newLine)},e=function(e,t){return 
this.tokens.splice(","===this.tag(t-1)?t-1:t,0,s)},this.scanTokens(function(n,a,c){var l,h,u,p,f,m;if(m=n[0],"TERMINATOR"===m){if("ELSE"===this.tag(a+1)&&"OUTDENT"!==this.tag(a-1))return c.splice.apply(c,[a,1].concat(w.call(this.indentation()))),1;if(u=this.tag(a+1),k.call(i,u)>=0)return c.splice(a,1),0}if("CATCH"===m)for(l=h=1;2>=h;l=++h)if("OUTDENT"===(p=this.tag(a+l))||"TERMINATOR"===p||"FINALLY"===p)return c.splice.apply(c,[a+l,0].concat(w.call(this.indentation()))),2+l;return k.call(d,m)>=0&&"INDENT"!==this.tag(a+1)&&("ELSE"!==m||"IF"!==this.tag(a+1))?(o=m,f=this.indentation(c[a]),r=f[0],s=f[1],"THEN"===o&&(r.fromThen=!0),c.splice(a+1,0,r),this.detectEnd(a+2,t,e),"THEN"===m&&c.splice(a,1),1):1})},e.prototype.tagPostfixConditionals=function(){var e,t,n;return n=null,t=function(e,t){var n,i;return i=e[0],n=this.tokens[t-1][0],"TERMINATOR"===i||"INDENT"===i&&0>k.call(d,n)},e=function(e){return"INDENT"!==e[0]||e.generated&&!e.fromThen?n[0]="POST_"+n[0]:void 0},this.scanTokens(function(i,r){return"IF"!==i[0]?1:(n=i,this.detectEnd(r+1,t,e),1)})},e.prototype.indentation=function(e){var t,n;return t=["INDENT",2],n=["OUTDENT",2],e?(t.generated=n.generated=!0,t.origin=n.origin=e):t.explicit=n.explicit=!0,[t,n]},e.prototype.generate=f,e.prototype.tag=function(e){var t;return null!=(t=this.tokens[e])?t[0]:void 
0},e}(),t=[["(",")"],["[","]"],["{","}"],["INDENT","OUTDENT"],["CALL_START","CALL_END"],["PARAM_START","PARAM_END"],["INDEX_START","INDEX_END"],["STRING_START","STRING_END"],["REGEX_START","REGEX_END"]],e.INVERSES=h={},s=[],r=[],m=0,v=t.length;v>m;m++)b=t[m],g=b[0],y=b[1],s.push(h[y]=g),r.push(h[g]=y);i=["CATCH","THEN","ELSE","FINALLY"].concat(r),c=["IDENTIFIER","SUPER",")","CALL_END","]","INDEX_END","@","THIS"],o=["IDENTIFIER","NUMBER","STRING","STRING_START","JS","REGEX","REGEX_START","NEW","PARAM_START","CLASS","IF","TRY","SWITCH","THIS","BOOL","NULL","UNDEFINED","UNARY","YIELD","UNARY_MATH","SUPER","THROW","@","->","=>","[","(","{","--","++"],l=["+","-"],a=["POST_IF","FOR","WHILE","UNTIL","WHEN","BY","LOOP","TERMINATOR"],d=["ELSE","->","=>","TRY","FINALLY","THEN"],p=["TERMINATOR","CATCH","FINALLY","ELSE","OUTDENT","LEADING_WHEN"],u=["TERMINATOR","INDENT","OUTDENT"],n=[".","?.","::","?::"]}.call(this),t.exports}(),require["./lexer"]=function(){var e={},t={exports:e};return function(){var t,n,i,r,s,o,a,c,l,h,u,p,d,f,m,g,v,b,y,k,w,T,C,F,E,N,L,x,S,D,R,A,I,_,O,$,j,M,B,V,P,U,G,H,q,X,W,Y,K,z,J,Q,Z,et,tt,nt,it,rt,st,ot,at,ct,lt,ht,ut=[].indexOf||function(e){for(var t=0,n=this.length;n>t;t++)if(t in this&&this[t]===e)return t;return-1};ot=require("./rewriter"),P=ot.Rewriter,w=ot.INVERSES,at=require("./helpers"),nt=at.count,lt=at.starts,tt=at.compact,ct=at.repeat,it=at.invertLiterate,st=at.locationDataToString,ht=at.throwSyntaxError,e.Lexer=S=function(){function e(){}return e.prototype.tokenize=function(e,t){var 
n,i,r,s;for(null==t&&(t={}),this.literate=t.literate,this.indent=0,this.baseIndent=0,this.indebt=0,this.outdebt=0,this.indents=[],this.ends=[],this.tokens=[],this.seenFor=!1,this.chunkLine=t.line||0,this.chunkColumn=t.column||0,e=this.clean(e),r=0;this.chunk=e.slice(r);)if(n=this.identifierToken()||this.commentToken()||this.whitespaceToken()||this.lineToken()||this.stringToken()||this.numberToken()||this.regexToken()||this.jsToken()||this.literalToken(),s=this.getLineAndColumnFromChunk(n),this.chunkLine=s[0],this.chunkColumn=s[1],r+=n,t.untilBalanced&&0===this.ends.length)return{tokens:this.tokens,index:r};return this.closeIndentation(),(i=this.ends.pop())&&this.error("missing "+i.tag,i.origin[2]),t.rewrite===!1?this.tokens:(new P).rewrite(this.tokens)},e.prototype.clean=function(e){return e.charCodeAt(0)===t&&(e=e.slice(1)),e=e.replace(/\r/g,"").replace(z,""),et.test(e)&&(e="\n"+e,this.chunkLine--),this.literate&&(e=it(e)),e},e.prototype.identifierToken=function(){var e,t,n,i,r,c,l,h,u,p,d,f,m,g,b,y;return(h=v.exec(this.chunk))?(l=h[0],r=h[1],t=h[2],c=r.length,u=void 0,"own"===r&&"FOR"===this.tag()?(this.token("OWN",r),r.length):"from"===r&&"YIELD"===this.tag()?(this.token("FROM",r),r.length):(d=this.tokens,p=d[d.length-1],i=t||null!=p&&("."===(f=p[0])||"?."===f||"::"===f||"?::"===f||!p.spaced&&"@"===p[0]),b="IDENTIFIER",!i&&(ut.call(F,r)>=0||ut.call(a,r)>=0)&&(b=r.toUpperCase(),"WHEN"===b&&(m=this.tag(),ut.call(N,m)>=0)?b="LEADING_WHEN":"FOR"===b?this.seenFor=!0:"UNLESS"===b?b="IF":ut.call(J,b)>=0?b="UNARY":ut.call(B,b)>=0&&("INSTANCEOF"!==b&&this.seenFor?(b="FOR"+b,this.seenFor=!1):(b="RELATION","!"===this.value()&&(u=this.tokens.pop(),r="!"+r)))),ut.call(C,r)>=0&&(i?(b="IDENTIFIER",r=new String(r),r.reserved=!0):ut.call(V,r)>=0&&this.error("reserved word 
'"+r+"'",{length:r.length})),i||(ut.call(s,r)>=0&&(e=r,r=o[r]),b=function(){switch(r){case"!":return"UNARY";case"==":case"!=":return"COMPARE";case"&&":case"||":return"LOGIC";case"true":case"false":return"BOOL";case"break":case"continue":return"STATEMENT";default:return b}}()),y=this.token(b,r,0,c),e&&(y.origin=[b,e,y[2]]),y.variable=!i,u&&(g=[u[2].first_line,u[2].first_column],y[2].first_line=g[0],y[2].first_column=g[1]),t&&(n=l.lastIndexOf(":"),this.token(":",":",n,t.length)),l.length)):0},e.prototype.numberToken=function(){var e,t,n,i,r;return(n=I.exec(this.chunk))?(i=n[0],t=i.length,/^0[BOX]/.test(i)?this.error("radix prefix in '"+i+"' must be lowercase",{offset:1}):/E/.test(i)&&!/^0x/.test(i)?this.error("exponential notation in '"+i+"' must be indicated with a lowercase 'e'",{offset:i.indexOf("E")}):/^0\d*[89]/.test(i)?this.error("decimal literal '"+i+"' must not be prefixed with '0'",{length:t}):/^0\d+/.test(i)&&this.error("octal literal '"+i+"' must be prefixed with '0o'",{length:t}),(r=/^0o([0-7]+)/.exec(i))&&(i="0x"+parseInt(r[1],8).toString(16)),(e=/^0b([01]+)/.exec(i))&&(i="0x"+parseInt(e[1],2).toString(16)),this.token("NUMBER",i,0,t),t):0},e.prototype.stringToken=function(){var e,t,n,i,r,s,o,a,c,l,h,u,m,g,v,b;if(h=(Y.exec(this.chunk)||[])[0],!h)return 0;if(g=function(){switch(h){case"'":return W;case'"':return q;case"'''":return f;case'"""':return p}}(),s=3===h.length,u=this.matchWithInterpolations(g,h),b=u.tokens,r=u.index,e=b.length-1,n=h.charAt(0),s){for(a=null,i=function(){var e,t,n;for(n=[],o=e=0,t=b.length;t>e;o=++e)v=b[o],"NEOSTRING"===v[0]&&n.push(v[1]);return n}().join("#{}");l=d.exec(i);)t=l[1],(null===a||(m=t.length)>0&&a.length>m)&&(a=t);a&&(c=RegExp("^"+a,"gm")),this.mergeInterpolationTokens(b,{delimiter:n},function(t){return function(n,i){return n=t.formatString(n),0===i&&(n=n.replace(E,"")),i===e&&(n=n.replace(K,"")),c&&(n=n.replace(c,"")),n}}(this))}else this.mergeInterpolationTokens(b,{delimiter:n},function(t){return function(n,i){return 
n=t.formatString(n),n=n.replace(G,function(t,r){return 0===i&&0===r||i===e&&r+t.length===n.length?"":" "})}}(this));return r},e.prototype.commentToken=function(){var e,t,n;return(n=this.chunk.match(c))?(e=n[0],t=n[1],t&&((n=u.exec(e))&&this.error("block comments cannot contain "+n[0],{offset:n.index,length:n[0].length}),t.indexOf("\n")>=0&&(t=t.replace(RegExp("\\n"+ct(" ",this.indent),"g"),"\n")),this.token("HERECOMMENT",t,0,e.length)),e.length):0},e.prototype.jsToken=function(){var e,t;return"`"===this.chunk.charAt(0)&&(e=T.exec(this.chunk))?(this.token("JS",(t=e[0]).slice(1,-1),0,t.length),t.length):0},e.prototype.regexToken=function(){var e,t,n,r,s,o,a,c,l,h,u,p,d;switch(!1){case!(o=M.exec(this.chunk)):this.error("regular expressions cannot begin with "+o[2],{offset:o.index+o[1].length});break;case!(o=this.matchWithInterpolations(m,"///")):d=o.tokens,s=o.index;break;case!(o=$.exec(this.chunk)):if(p=o[0],e=o[1],t=o[2],this.validateEscapes(e,{isRegex:!0,offsetInChunk:1}),s=p.length,l=this.tokens,c=l[l.length-1],c)if(c.spaced&&(h=c[0],ut.call(i,h)>=0)){if(!t||O.test(p))return 0}else if(u=c[0],ut.call(A,u)>=0)return 0;t||this.error("missing / (unclosed regex)");break;default:return 0}switch(r=j.exec(this.chunk.slice(s))[0],n=s+r.length,a=this.makeToken("REGEX",null,0,n),!1){case!!Z.test(r):this.error("invalid regular expression flags "+r,{offset:s,length:r.length});break;case!(p||1===d.length):null==e&&(e=this.formatHeregex(d[0][1])),this.token("REGEX",""+this.makeDelimitedLiteral(e,{delimiter:"/"})+r,0,n,a);break;default:this.token("REGEX_START","(",0,0,a),this.token("IDENTIFIER","RegExp",0,0),this.token("CALL_START","(",0,0),this.mergeInterpolationTokens(d,{delimiter:'"',"double":!0},this.formatHeregex),r&&(this.token(",",",",s,0),this.token("STRING",'"'+r+'"',s,r.length)),this.token(")",")",n,0),this.token("REGEX_END",")",n,0)}return n},e.prototype.lineToken=function(){var e,t,n,i,r;if(!(n=R.exec(this.chunk)))return 
0;if(t=n[0],this.seenFor=!1,r=t.length-1-t.lastIndexOf("\n"),i=this.unfinished(),r-this.indebt===this.indent)return i?this.suppressNewlines():this.newlineToken(0),t.length;if(r>this.indent){if(i)return this.indebt=r-this.indent,this.suppressNewlines(),t.length;if(!this.tokens.length)return this.baseIndent=this.indent=r,t.length;e=r-this.indent+this.outdebt,this.token("INDENT",e,t.length-r,r),this.indents.push(e),this.ends.push({tag:"OUTDENT"}),this.outdebt=this.indebt=0,this.indent=r}else this.baseIndent>r?this.error("missing indentation",{offset:t.length}):(this.indebt=0,this.outdentToken(this.indent-r,i,t.length));return t.length},e.prototype.outdentToken=function(e,t,n){var i,r,s,o;for(i=this.indent-e;e>0;)s=this.indents[this.indents.length-1],s?s===this.outdebt?(e-=this.outdebt,this.outdebt=0):this.outdebt>s?(this.outdebt-=s,e-=s):(r=this.indents.pop()+this.outdebt,n&&(o=this.chunk[n],ut.call(b,o)>=0)&&(i-=r-e,e=r),this.outdebt=0,this.pair("OUTDENT"),this.token("OUTDENT",e,0,n),e-=r):e=0;for(r&&(this.outdebt-=e);";"===this.value();)this.tokens.pop();return"TERMINATOR"===this.tag()||t||this.token("TERMINATOR","\n",n,0),this.indent=i,this},e.prototype.whitespaceToken=function(){var e,t,n,i;return(e=et.exec(this.chunk))||(t="\n"===this.chunk.charAt(0))?(i=this.tokens,n=i[i.length-1],n&&(n[e?"spaced":"newLine"]=!0),e?e[0].length:0):0},e.prototype.newlineToken=function(e){for(;";"===this.value();)this.tokens.pop();return"TERMINATOR"!==this.tag()&&this.token("TERMINATOR","\n",e,0),this},e.prototype.suppressNewlines=function(){return"\\"===this.value()&&this.tokens.pop(),this},e.prototype.literalToken=function(){var e,t,n,s,o,a,c,u,p,d;if((e=_.exec(this.chunk))?(d=e[0],r.test(d)&&this.tagParameters()):d=this.chunk.charAt(0),u=d,n=this.tokens,t=n[n.length-1],"="===d&&t&&(!t[1].reserved&&(s=t[1],ut.call(C,s)>=0)&&(t.origin&&(t=t.origin),this.error("reserved word '"+t[1]+"' can't be assigned",t[2])),"||"===(o=t[1])||"&&"===o))return 
t[0]="COMPOUND_ASSIGN",t[1]+="=",d.length;if(";"===d)this.seenFor=!1,u="TERMINATOR";else if(ut.call(D,d)>=0)u="MATH";else if(ut.call(l,d)>=0)u="COMPARE";else if(ut.call(h,d)>=0)u="COMPOUND_ASSIGN";else if(ut.call(J,d)>=0)u="UNARY";else if(ut.call(Q,d)>=0)u="UNARY_MATH";else if(ut.call(U,d)>=0)u="SHIFT";else if(ut.call(x,d)>=0||"?"===d&&(null!=t?t.spaced:void 0))u="LOGIC";else if(t&&!t.spaced)if("("===d&&(a=t[0],ut.call(i,a)>=0))"?"===t[0]&&(t[0]="FUNC_EXIST"),u="CALL_START";else if("["===d&&(c=t[0],ut.call(y,c)>=0))switch(u="INDEX_START",t[0]){case"?":t[0]="INDEX_SOAK"}switch(p=this.makeToken(u,d),d){case"(":case"{":case"[":this.ends.push({tag:w[d],origin:p});break;case")":case"}":case"]":this.pair(d)}return this.tokens.push(p),d.length},e.prototype.tagParameters=function(){var e,t,n,i;if(")"!==this.tag())return this;for(t=[],i=this.tokens,e=i.length,i[--e][0]="PARAM_END";n=i[--e];)switch(n[0]){case")":t.push(n);break;case"(":case"CALL_START":if(!t.length)return"("===n[0]?(n[0]="PARAM_START",this):this;t.pop()}return this},e.prototype.closeIndentation=function(){return this.outdentToken(this.indent)},e.prototype.matchWithInterpolations=function(t,n){var i,r,s,o,a,c,l,h,u,p,d,f,m,g,v;if(v=[],h=n.length,this.chunk.slice(0,h)!==n)return null;for(m=this.chunk.slice(h);;){if(g=t.exec(m)[0],this.validateEscapes(g,{isRegex:"/"===n.charAt(0),offsetInChunk:h}),v.push(this.makeToken("NEOSTRING",g,h)),m=m.slice(g.length),h+=g.length,"#{"!==m.slice(0,2))break;p=this.getLineAndColumnFromChunk(h+1),c=p[0],r=p[1],d=(new e).tokenize(m.slice(1),{line:c,column:r,untilBalanced:!0}),l=d.tokens,o=d.index,o+=1,u=l[0],i=l[l.length-1],u[0]=u[1]="(",i[0]=i[1]=")",i.origin=["","end of interpolation",i[2]],"TERMINATOR"===(null!=(f=l[1])?f[0]:void 0)&&l.splice(1,1),v.push(["TOKENS",l]),m=m.slice(o),h+=o}return m.slice(0,n.length)!==n&&this.error("missing 
"+n,{length:n.length}),s=v[0],a=v[v.length-1],s[2].first_column-=n.length,a[2].last_column+=n.length,0===a[1].length&&(a[2].last_column-=1),{tokens:v,index:h+n.length}},e.prototype.mergeInterpolationTokens=function(e,t,n){var i,r,s,o,a,c,l,h,u,p,d,f,m,g,v,b;for(e.length>1&&(u=this.token("STRING_START","(",0,0)),s=this.tokens.length,o=a=0,l=e.length;l>a;o=++a){switch(g=e[o],m=g[0],b=g[1],m){case"TOKENS":if(2===b.length)continue;h=b[0],v=b;break;case"NEOSTRING":if(i=n(g[1],o),0===i.length){if(0!==o)continue;r=this.tokens.length}2===o&&null!=r&&this.tokens.splice(r,2),g[0]="STRING",g[1]=this.makeDelimitedLiteral(i,t),h=g,v=[g]}this.tokens.length>s&&(p=this.token("+","+"),p[2]={first_line:h[2].first_line,first_column:h[2].first_column,last_line:h[2].first_line,last_column:h[2].first_column}),(d=this.tokens).push.apply(d,v)}return u?(c=e[e.length-1],u.origin=["STRING",null,{first_line:u[2].first_line,first_column:u[2].first_column,last_line:c[2].last_line,last_column:c[2].last_column}],f=this.token("STRING_END",")"),f[2]={first_line:c[2].last_line,first_column:c[2].last_column,last_line:c[2].last_line,last_column:c[2].last_column}):void 0},e.prototype.pair=function(e){var t,n,i,r,s;return i=this.ends,n=i[i.length-1],e!==(s=null!=n?n.tag:void 0)?("OUTDENT"!==s&&this.error("unmatched "+e),r=this.indents,t=r[r.length-1],this.outdentToken(t,!0),this.pair(e)):this.ends.pop()},e.prototype.getLineAndColumnFromChunk=function(e){var t,n,i,r,s;return 0===e?[this.chunkLine,this.chunkColumn]:(s=e>=this.chunk.length?this.chunk:this.chunk.slice(0,+(e-1)+1||9e9),i=nt(s,"\n"),t=this.chunkColumn,i>0?(r=s.split("\n"),n=r[r.length-1],t=n.length):t+=s.length,[this.chunkLine+i,t])},e.prototype.makeToken=function(e,t,n,i){var r,s,o,a,c;return 
null==n&&(n=0),null==i&&(i=t.length),s={},o=this.getLineAndColumnFromChunk(n),s.first_line=o[0],s.first_column=o[1],r=Math.max(0,i-1),a=this.getLineAndColumnFromChunk(n+r),s.last_line=a[0],s.last_column=a[1],c=[e,t,s]},e.prototype.token=function(e,t,n,i,r){var s;return s=this.makeToken(e,t,n,i),r&&(s.origin=r),this.tokens.push(s),s},e.prototype.tag=function(){var e,t;return e=this.tokens,t=e[e.length-1],null!=t?t[0]:void 0},e.prototype.value=function(){var e,t;return e=this.tokens,t=e[e.length-1],null!=t?t[1]:void 0},e.prototype.unfinished=function(){var e;return L.test(this.chunk)||"\\"===(e=this.tag())||"."===e||"?."===e||"?::"===e||"UNARY"===e||"MATH"===e||"UNARY_MATH"===e||"+"===e||"-"===e||"YIELD"===e||"**"===e||"SHIFT"===e||"RELATION"===e||"COMPARE"===e||"LOGIC"===e||"THROW"===e||"EXTENDS"===e},e.prototype.formatString=function(e){return e.replace(X,"$1")},e.prototype.formatHeregex=function(e){return e.replace(g,"$1$2")},e.prototype.validateEscapes=function(e,t){var n,i,r,s,o,a,c,l;return null==t&&(t={}),s=k.exec(e),!s||(s[0],n=s[1],a=s[2],i=s[3],l=s[4],t.isRegex&&a&&"0"!==a.charAt(0))?void 0:(o=a?"octal escape sequences are not allowed":"invalid escape sequence",r="\\"+(a||i||l),this.error(o+" "+r,{offset:(null!=(c=t.offsetInChunk)?c:0)+s.index+n.length,length:r.length}))},e.prototype.makeDelimitedLiteral=function(e,t){var n;return null==t&&(t={}),""===e&&"/"===t.delimiter&&(e="(?:)"),n=RegExp("(\\\\\\\\)|(\\\\0(?=[1-7]))|\\\\?("+t.delimiter+")|\\\\?(?:(\\n)|(\\r)|(\\u2028)|(\\u2029))|(\\\\.)","g"),e=e.replace(n,function(e,n,i,r,s,o,a,c,l){switch(!1){case!n:return t.double?n+n:n;case!i:return"\\x00";case!r:return"\\"+r;case!s:return"\\n";case!o:return"\\r";case!a:return"\\u2028";case!c:return"\\u2029";case!l:return t.double?"\\"+l:l}}),""+t.delimiter+e+t.delimiter},e.prototype.error=function(e,t){var n,i,r,s,o,a;return null==t&&(t={}),r="first_line"in 
t?t:(o=this.getLineAndColumnFromChunk(null!=(s=t.offset)?s:0),i=o[0],n=o[1],o,{first_line:i,first_column:n,last_column:n+(null!=(a=t.length)?a:1)-1}),ht(e,r)},e}(),F=["true","false","null","this","new","delete","typeof","in","instanceof","return","throw","break","continue","debugger","yield","if","else","switch","for","while","do","try","catch","finally","class","extends","super"],a=["undefined","then","unless","until","loop","of","by","when"],o={and:"&&",or:"||",is:"==",isnt:"!=",not:"!",yes:"true",no:"false",on:"true",off:"false"},s=function(){var e;e=[];for(rt in o)e.push(rt);return e}(),a=a.concat(s),V=["case","default","function","var","void","with","const","let","enum","export","import","native","implements","interface","package","private","protected","public","static"],H=["arguments","eval","yield*"],C=F.concat(V).concat(H),e.RESERVED=V.concat(F).concat(a).concat(H),e.STRICT_PROSCRIBED=H,t=65279,v=/^(?!\d)((?:(?!\s)[$\w\x7f-\uffff])+)([^\n\S]*:(?!:))?/,I=/^0b[01]+|^0o[0-7]+|^0x[\da-f]+|^\d*\.?\d+(?:e[+-]?\d+)?/i,_=/^(?:[-=]>|[-+*\/%<>&|^!?=]=|>>>=?|([-+:])\1|([&|<>*\/%])\2=?|\?(\.|::)|\.{2,3})/,et=/^[^\n\S]+/,c=/^###([^#][\s\S]*?)(?:###[^\n\S]*|###$)|^(?:\s*#(?!##[^#]).*)+/,r=/^[-=]>/,R=/^(?:\n[^\n\S]*)+/,T=/^`[^\\`]*(?:\\.[^\\`]*)*`/,Y=/^(?:'''|"""|'|")/,W=/^(?:[^\\']|\\[\s\S])*/,q=/^(?:[^\\"#]|\\[\s\S]|\#(?!\{))*/,f=/^(?:[^\\']|\\[\s\S]|'(?!''))*/,p=/^(?:[^\\"#]|\\[\s\S]|"(?!"")|\#(?!\{))*/,X=/((?:\\\\)+)|\\[^\S\n]*\n\s*/g,G=/\s*\n\s*/g,d=/\n+([^\n\S]*)(?=\S)/g,$=/^\/(?!\/)((?:[^[\/\n\\]|\\[^\n]|\[(?:\\[^\n]|[^\]\n\\])*\])*)(\/)?/,j=/^\w*/,Z=/^(?!.*(.).*\1)[imgy]*$/,m=/^(?:[^\\\/#]|\\[\s\S]|\/(?!\/\/)|\#(?!\{))*/,g=/((?:\\\\)+)|\\(\s)|\s+(?:#.*)?/g,M=/^(\/|\/{3}\s*)(\*)/,O=/^\/=?\s/,u=/\*\//,L=/^\s*(?:,|\??\.(?![.\d])|::)/,k=/((?:^|[^\\])(?:\\\\)*)\\(?:(0[0-7]|[1-7])|(x(?![\da-fA-F]{2}).{0,2})|(u(?![\da-fA-F]{4}).{0,4}))/,E=/^[^\n\S]*\n/,K=/\n[^\n\S]*$/,z=/\s+$/,h=["-=","+=","/=","*=","%=","||=","&&=","?=","<<=",">>=",">>>=","&=","^=","|=","**=","//=","%%="
],J=["NEW","TYPEOF","DELETE","DO"],Q=["!","~"],x=["&&","||","&","|","^"],U=["<<",">>",">>>"],l=["==","!=","<",">","<=",">="],D=["*","/","%","//","%%"],B=["IN","OF","INSTANCEOF"],n=["TRUE","FALSE"],i=["IDENTIFIER",")","]","?","@","THIS","SUPER"],y=i.concat(["NUMBER","STRING","STRING_END","REGEX","REGEX_END","BOOL","NULL","UNDEFINED","}","::"]),A=y.concat(["++","--"]),N=["INDENT","OUTDENT","TERMINATOR"],b=[")","}","]"]}.call(this),t.exports}(),require["./parser"]=function(){var e={},t={exports:e},n=function(){function e(){this.yy={}}var t=function(e,t,n,i){for(n=n||{},i=e.length;i--;n[e[i]]=t);return n},n=[1,20],i=[1,75],r=[1,71],s=[1,76],o=[1,77],a=[1,73],c=[1,74],l=[1,50],h=[1,52],u=[1,53],p=[1,54],d=[1,55],f=[1,45],m=[1,46],g=[1,27],v=[1,60],b=[1,61],y=[1,70],k=[1,43],w=[1,26],T=[1,58],C=[1,59],F=[1,57],E=[1,38],N=[1,44],L=[1,56],x=[1,65],S=[1,66],D=[1,67],R=[1,68],A=[1,42],I=[1,64],_=[1,29],O=[1,30],$=[1,31],j=[1,32],M=[1,33],B=[1,34],V=[1,35],P=[1,78],U=[1,6,26,34,109],G=[1,88],H=[1,81],q=[1,80],X=[1,79],W=[1,82],Y=[1,83],K=[1,84],z=[1,85],J=[1,86],Q=[1,87],Z=[1,91],et=[1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,136,137,142,143,144,145,146,147,148],tt=[1,97],nt=[1,98],it=[1,99],rt=[1,100],st=[1,102],ot=[1,103],at=[1,96],ct=[2,115],lt=[1,6,25,26,34,56,61,64,73,74,75,76,78,80,81,85,91,92,93,98,100,109,111,112,113,117,118,133,136,137,142,143,144,145,146,147,148],ht=[2,82],ut=[1,108],pt=[2,61],dt=[1,112],ft=[1,117],mt=[1,118],gt=[1,120],vt=[1,6,25,26,34,46,56,61,64,73,74,75,76,78,80,81,85,91,92,93,98,100,109,111,112,113,117,118,133,136,137,142,143,144,145,146,147,148],bt=[2,79],yt=[1,6,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,136,137,142,143,144,145,146,147,148],kt=[1,155],wt=[1,157],Tt=[1,152],Ct=[1,6,25,26,34,46,56,61,64,73,74,75,76,78,80,81,85,87,91,92,93,98,100,109,111,112,113,117,118,133,136,137,140,141,142,143,144,145,146,147,148,149],Ft=[2,98],Et=[1,6,25,26,34,49,56,61,64,73,74,75,76,78,80,81,85,91,92,93,98,100,109,111,
112,113,117,118,133,136,137,142,143,144,145,146,147,148],Nt=[1,6,25,26,34,46,49,56,61,64,73,74,75,76,78,80,81,85,87,91,92,93,98,100,109,111,112,113,117,118,124,125,133,136,137,140,141,142,143,144,145,146,147,148,149],Lt=[1,207],xt=[1,206],St=[1,6,25,26,34,38,56,61,64,73,74,75,76,78,80,81,85,91,92,93,98,100,109,111,112,113,117,118,133,136,137,142,143,144,145,146,147,148],Dt=[2,59],Rt=[1,217],At=[6,25,26,56,61],It=[6,25,26,46,56,61,64],_t=[1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,136,137,143,145,146,147,148],Ot=[1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133],$t=[73,74,75,76,78,81,91,92],jt=[1,236],Mt=[2,136],Bt=[1,6,25,26,34,46,56,61,64,73,74,75,76,78,80,81,85,91,92,93,98,100,109,111,112,113,117,118,124,125,133,136,137,142,143,144,145,146,147,148],Vt=[1,245],Pt=[6,25,26,61,93,98],Ut=[1,6,25,26,34,56,61,64,80,85,93,98,100,109,118,133],Gt=[1,6,25,26,34,56,61,64,80,85,93,98,100,109,112,118,133],Ht=[124,125],qt=[61,124,125],Xt=[1,256],Wt=[6,25,26,61,85],Yt=[6,25,26,49,61,85],Kt=[6,25,26,46,49,61,85],zt=[1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,136,137,145,146,147,148],Jt=[11,28,30,32,33,36,37,40,41,42,43,44,52,53,54,58,59,80,83,86,90,95,96,97,103,107,108,111,113,115,117,126,132,134,135,136,137,138,140,141],Qt=[2,125],Zt=[6,25,26],en=[2,60],tn=[1,270],nn=[1,271],rn=[1,6,25,26,34,56,61,64,80,85,93,98,100,105,106,109,111,112,113,117,118,128,130,133,136,137,142,143,144,145,146,147,148],sn=[26,128,130],on=[1,6,26,34,56,61,64,80,85,93,98,100,109,112,118,133],an=[2,74],cn=[1,293],ln=[1,294],hn=[1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,128,133,136,137,142,143,144,145,146,147,148],un=[1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,113,117,118,133],pn=[1,305],dn=[1,306],fn=[6,25,26,61],mn=[1,6,25,26,34,56,61,64,80,85,93,98,100,105,109,111,112,113,117,118,133,136,137,142,143,144,145,146,147,148],gn=[25,61],vn={trace:function(){},yy:{},symbols_:{error:2,Root:3,Body:4,Line:5,TERMINATOR:6,Expressi
on:7,Statement:8,Return:9,Comment:10,STATEMENT:11,Value:12,Invocation:13,Code:14,Operation:15,Assign:16,If:17,Try:18,While:19,For:20,Switch:21,Class:22,Throw:23,Block:24,INDENT:25,OUTDENT:26,Identifier:27,IDENTIFIER:28,AlphaNumeric:29,NUMBER:30,String:31,STRING:32,STRING_START:33,STRING_END:34,Regex:35,REGEX:36,REGEX_START:37,REGEX_END:38,Literal:39,JS:40,DEBUGGER:41,UNDEFINED:42,NULL:43,BOOL:44,Assignable:45,"=":46,AssignObj:47,ObjAssignable:48,":":49,SimpleObjAssignable:50,ThisProperty:51,RETURN:52,HERECOMMENT:53,PARAM_START:54,ParamList:55,PARAM_END:56,FuncGlyph:57,"->":58,"=>":59,OptComma:60,",":61,Param:62,ParamVar:63,"...":64,Array:65,Object:66,Splat:67,SimpleAssignable:68,Accessor:69,Parenthetical:70,Range:71,This:72,".":73,"?.":74,"::":75,"?::":76,Index:77,INDEX_START:78,IndexValue:79,INDEX_END:80,INDEX_SOAK:81,Slice:82,"{":83,AssignList:84,"}":85,CLASS:86,EXTENDS:87,OptFuncExist:88,Arguments:89,SUPER:90,FUNC_EXIST:91,CALL_START:92,CALL_END:93,ArgList:94,THIS:95,"@":96,"[":97,"]":98,RangeDots:99,"..":100,Arg:101,SimpleArgs:102,TRY:103,Catch:104,FINALLY:105,CATCH:106,THROW:107,"(":108,")":109,WhileSource:110,WHILE:111,WHEN:112,UNTIL:113,Loop:114,LOOP:115,ForBody:116,FOR:117,BY:118,ForStart:119,ForSource:120,ForVariables:121,OWN:122,ForValue:123,FORIN:124,FOROF:125,SWITCH:126,Whens:127,ELSE:128,When:129,LEADING_WHEN:130,IfBlock:131,IF:132,POST_IF:133,UNARY:134,UNARY_MATH:135,"-":136,"+":137,YIELD:138,FROM:139,"--":140,"++":141,"?":142,MATH:143,"**":144,SHIFT:145,COMPARE:146,LOGIC:147,RELATION:148,COMPOUND_ASSIGN:149,$accept:0,$end:1},terminals_:{2:"error",6:"TERMINATOR",11:"STATEMENT",25:"INDENT",26:"OUTDENT",28:"IDENTIFIER",30:"NUMBER",32:"STRING",33:"STRING_START",34:"STRING_END",36:"REGEX",37:"REGEX_START",38:"REGEX_END",40:"JS",41:"DEBUGGER",42:"UNDEFINED",43:"NULL",44:"BOOL",46:"=",49:":",52:"RETURN",53:"HERECOMMENT",54:"PARAM_START",56:"PARAM_END",58:"->",59:"=>",61:",",64:"...",73:".",74:"?.",75:"::",76:"?::",78:"INDEX_START",80:"INDEX_END",81:"INDEX_SO
AK",83:"{",85:"}",86:"CLASS",87:"EXTENDS",90:"SUPER",91:"FUNC_EXIST",92:"CALL_START",93:"CALL_END",95:"THIS",96:"@",97:"[",98:"]",100:"..",103:"TRY",105:"FINALLY",106:"CATCH",107:"THROW",108:"(",109:")",111:"WHILE",112:"WHEN",113:"UNTIL",115:"LOOP",117:"FOR",118:"BY",122:"OWN",124:"FORIN",125:"FOROF",126:"SWITCH",128:"ELSE",130:"LEADING_WHEN",132:"IF",133:"POST_IF",134:"UNARY",135:"UNARY_MATH",136:"-",137:"+",138:"YIELD",139:"FROM",140:"--",141:"++",142:"?",143:"MATH",144:"**",145:"SHIFT",146:"COMPARE",147:"LOGIC",148:"RELATION",149:"COMPOUND_ASSIGN"},productions_:[0,[3,0],[3,1],[4,1],[4,3],[4,2],[5,1],[5,1],[8,1],[8,1],[8,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[24,2],[24,3],[27,1],[29,1],[29,1],[31,1],[31,3],[35,1],[35,3],[39,1],[39,1],[39,1],[39,1],[39,1],[39,1],[39,1],[16,3],[16,4],[16,5],[47,1],[47,3],[47,5],[47,3],[47,5],[47,1],[50,1],[50,1],[48,1],[48,1],[9,2],[9,1],[10,1],[14,5],[14,2],[57,1],[57,1],[60,0],[60,1],[55,0],[55,1],[55,3],[55,4],[55,6],[62,1],[62,2],[62,3],[62,1],[63,1],[63,1],[63,1],[63,1],[67,2],[68,1],[68,2],[68,2],[68,1],[45,1],[45,1],[45,1],[12,1],[12,1],[12,1],[12,1],[12,1],[69,2],[69,2],[69,2],[69,2],[69,1],[69,1],[77,3],[77,2],[79,1],[79,1],[66,4],[84,0],[84,1],[84,3],[84,4],[84,6],[22,1],[22,2],[22,3],[22,4],[22,2],[22,3],[22,4],[22,5],[13,3],[13,3],[13,1],[13,2],[88,0],[88,1],[89,2],[89,4],[72,1],[72,1],[51,2],[65,2],[65,4],[99,1],[99,1],[71,5],[82,3],[82,2],[82,2],[82,1],[94,1],[94,3],[94,4],[94,4],[94,6],[101,1],[101,1],[101,1],[102,1],[102,3],[18,2],[18,3],[18,4],[18,5],[104,3],[104,3],[104,2],[23,2],[70,3],[70,5],[110,2],[110,4],[110,2],[110,4],[19,2],[19,2],[19,2],[19,1],[114,2],[114,2],[20,2],[20,2],[20,2],[116,2],[116,4],[116,2],[119,2],[119,3],[123,1],[123,1],[123,1],[123,1],[121,1],[121,3],[120,2],[120,2],[120,4],[120,4],[120,4],[120,6],[120,6],[21,5],[21,7],[21,4],[21,6],[127,1],[127,2],[129,3],[129,4],[131,3],[131,5],[17,1],[17,3],[17,3],[17,3],[15,2],[15,2],[15,2],[15,2],[15,2],[15,2],[15,3
],[15,2],[15,2],[15,2],[15,2],[15,2],[15,3],[15,3],[15,3],[15,3],[15,3],[15,3],[15,3],[15,3],[15,3],[15,5],[15,4],[15,3]],performAction:function(e,t,n,i,r,s,o){var a=s.length-1; -switch(r){case 1:return this.$=i.addLocationDataFn(o[a],o[a])(new i.Block);case 2:return this.$=s[a];case 3:this.$=i.addLocationDataFn(o[a],o[a])(i.Block.wrap([s[a]]));break;case 4:this.$=i.addLocationDataFn(o[a-2],o[a])(s[a-2].push(s[a]));break;case 5:this.$=s[a-1];break;case 6:case 7:case 8:case 9:case 11:case 12:case 13:case 14:case 15:case 16:case 17:case 18:case 19:case 20:case 21:case 22:case 27:case 32:case 34:case 47:case 48:case 49:case 50:case 51:case 59:case 60:case 70:case 71:case 72:case 73:case 78:case 79:case 82:case 86:case 92:case 136:case 137:case 139:case 169:case 170:case 186:case 192:this.$=s[a];break;case 10:case 25:case 26:case 28:case 30:case 33:case 35:this.$=i.addLocationDataFn(o[a],o[a])(new i.Literal(s[a]));break;case 23:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Block);break;case 24:case 31:case 93:this.$=i.addLocationDataFn(o[a-2],o[a])(s[a-1]);break;case 29:case 149:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Parens(s[a-1]));break;case 36:this.$=i.addLocationDataFn(o[a],o[a])(new i.Undefined);break;case 37:this.$=i.addLocationDataFn(o[a],o[a])(new i.Null);break;case 38:this.$=i.addLocationDataFn(o[a],o[a])(new i.Bool(s[a]));break;case 39:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Assign(s[a-2],s[a]));break;case 40:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.Assign(s[a-3],s[a]));break;case 41:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Assign(s[a-4],s[a-1]));break;case 42:case 75:case 80:case 81:case 83:case 84:case 85:case 171:case 172:this.$=i.addLocationDataFn(o[a],o[a])(new i.Value(s[a]));break;case 43:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Assign(i.addLocationDataFn(o[a-2])(new i.Value(s[a-2])),s[a],"object",{operatorToken:i.addLocationDataFn(o[a-1])(new i.Literal(s[a-1]))}));break;case 44:this.$=i.addLocationDataFn(o[a-4],o[a])(new 
i.Assign(i.addLocationDataFn(o[a-4])(new i.Value(s[a-4])),s[a-1],"object",{operatorToken:i.addLocationDataFn(o[a-3])(new i.Literal(s[a-3]))}));break;case 45:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Assign(i.addLocationDataFn(o[a-2])(new i.Value(s[a-2])),s[a],null,{operatorToken:i.addLocationDataFn(o[a-1])(new i.Literal(s[a-1]))}));break;case 46:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Assign(i.addLocationDataFn(o[a-4])(new i.Value(s[a-4])),s[a-1],null,{operatorToken:i.addLocationDataFn(o[a-3])(new i.Literal(s[a-3]))}));break;case 52:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Return(s[a]));break;case 53:this.$=i.addLocationDataFn(o[a],o[a])(new i.Return);break;case 54:this.$=i.addLocationDataFn(o[a],o[a])(new i.Comment(s[a]));break;case 55:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Code(s[a-3],s[a],s[a-1]));break;case 56:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Code([],s[a],s[a-1]));break;case 57:this.$=i.addLocationDataFn(o[a],o[a])("func");break;case 58:this.$=i.addLocationDataFn(o[a],o[a])("boundfunc");break;case 61:case 98:this.$=i.addLocationDataFn(o[a],o[a])([]);break;case 62:case 99:case 131:case 173:this.$=i.addLocationDataFn(o[a],o[a])([s[a]]);break;case 63:case 100:case 132:this.$=i.addLocationDataFn(o[a-2],o[a])(s[a-2].concat(s[a]));break;case 64:case 101:case 133:this.$=i.addLocationDataFn(o[a-3],o[a])(s[a-3].concat(s[a]));break;case 65:case 102:case 135:this.$=i.addLocationDataFn(o[a-5],o[a])(s[a-5].concat(s[a-2]));break;case 66:this.$=i.addLocationDataFn(o[a],o[a])(new i.Param(s[a]));break;case 67:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Param(s[a-1],null,!0));break;case 68:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Param(s[a-2],s[a]));break;case 69:case 138:this.$=i.addLocationDataFn(o[a],o[a])(new i.Expansion);break;case 74:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Splat(s[a-1]));break;case 76:this.$=i.addLocationDataFn(o[a-1],o[a])(s[a-1].add(s[a]));break;case 77:this.$=i.addLocationDataFn(o[a-1],o[a])(new 
i.Value(s[a-1],[].concat(s[a])));break;case 87:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Access(s[a]));break;case 88:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Access(s[a],"soak"));break;case 89:this.$=i.addLocationDataFn(o[a-1],o[a])([i.addLocationDataFn(o[a-1])(new i.Access(new i.Literal("prototype"))),i.addLocationDataFn(o[a])(new i.Access(s[a]))]);break;case 90:this.$=i.addLocationDataFn(o[a-1],o[a])([i.addLocationDataFn(o[a-1])(new i.Access(new i.Literal("prototype"),"soak")),i.addLocationDataFn(o[a])(new i.Access(s[a]))]);break;case 91:this.$=i.addLocationDataFn(o[a],o[a])(new i.Access(new i.Literal("prototype")));break;case 94:this.$=i.addLocationDataFn(o[a-1],o[a])(i.extend(s[a],{soak:!0}));break;case 95:this.$=i.addLocationDataFn(o[a],o[a])(new i.Index(s[a]));break;case 96:this.$=i.addLocationDataFn(o[a],o[a])(new i.Slice(s[a]));break;case 97:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.Obj(s[a-2],s[a-3].generated));break;case 103:this.$=i.addLocationDataFn(o[a],o[a])(new i.Class);break;case 104:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Class(null,null,s[a]));break;case 105:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Class(null,s[a]));break;case 106:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.Class(null,s[a-1],s[a]));break;case 107:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Class(s[a]));break;case 108:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Class(s[a-1],null,s[a]));break;case 109:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.Class(s[a-2],s[a]));break;case 110:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Class(s[a-3],s[a-1],s[a]));break;case 111:case 112:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Call(s[a-2],s[a],s[a-1]));break;case 113:this.$=i.addLocationDataFn(o[a],o[a])(new i.Call("super",[new i.Splat(new i.Literal("arguments"))]));break;case 114:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Call("super",s[a]));break;case 115:this.$=i.addLocationDataFn(o[a],o[a])(!1);break;case 
116:this.$=i.addLocationDataFn(o[a],o[a])(!0);break;case 117:this.$=i.addLocationDataFn(o[a-1],o[a])([]);break;case 118:case 134:this.$=i.addLocationDataFn(o[a-3],o[a])(s[a-2]);break;case 119:case 120:this.$=i.addLocationDataFn(o[a],o[a])(new i.Value(new i.Literal("this")));break;case 121:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Value(i.addLocationDataFn(o[a-1])(new i.Literal("this")),[i.addLocationDataFn(o[a])(new i.Access(s[a]))],"this"));break;case 122:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Arr([]));break;case 123:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.Arr(s[a-2]));break;case 124:this.$=i.addLocationDataFn(o[a],o[a])("inclusive");break;case 125:this.$=i.addLocationDataFn(o[a],o[a])("exclusive");break;case 126:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Range(s[a-3],s[a-1],s[a-2]));break;case 127:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Range(s[a-2],s[a],s[a-1]));break;case 128:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Range(s[a-1],null,s[a]));break;case 129:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Range(null,s[a],s[a-1]));break;case 130:this.$=i.addLocationDataFn(o[a],o[a])(new i.Range(null,null,s[a]));break;case 140:this.$=i.addLocationDataFn(o[a-2],o[a])([].concat(s[a-2],s[a]));break;case 141:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Try(s[a]));break;case 142:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Try(s[a-1],s[a][0],s[a][1]));break;case 143:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.Try(s[a-2],null,null,s[a]));break;case 144:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Try(s[a-3],s[a-2][0],s[a-2][1],s[a]));break;case 145:this.$=i.addLocationDataFn(o[a-2],o[a])([s[a-1],s[a]]);break;case 146:this.$=i.addLocationDataFn(o[a-2],o[a])([i.addLocationDataFn(o[a-1])(new i.Value(s[a-1])),s[a]]);break;case 147:this.$=i.addLocationDataFn(o[a-1],o[a])([null,s[a]]);break;case 148:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Throw(s[a]));break;case 150:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Parens(s[a-2]));break;case 
151:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.While(s[a]));break;case 152:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.While(s[a-2],{guard:s[a]}));break;case 153:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.While(s[a],{invert:!0}));break;case 154:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.While(s[a-2],{invert:!0,guard:s[a]}));break;case 155:this.$=i.addLocationDataFn(o[a-1],o[a])(s[a-1].addBody(s[a]));break;case 156:case 157:this.$=i.addLocationDataFn(o[a-1],o[a])(s[a].addBody(i.addLocationDataFn(o[a-1])(i.Block.wrap([s[a-1]]))));break;case 158:this.$=i.addLocationDataFn(o[a],o[a])(s[a]);break;case 159:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.While(i.addLocationDataFn(o[a-1])(new i.Literal("true"))).addBody(s[a]));break;case 160:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.While(i.addLocationDataFn(o[a-1])(new i.Literal("true"))).addBody(i.addLocationDataFn(o[a])(i.Block.wrap([s[a]]))));break;case 161:case 162:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.For(s[a-1],s[a]));break;case 163:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.For(s[a],s[a-1]));break;case 164:this.$=i.addLocationDataFn(o[a-1],o[a])({source:i.addLocationDataFn(o[a])(new i.Value(s[a]))});break;case 165:this.$=i.addLocationDataFn(o[a-3],o[a])({source:i.addLocationDataFn(o[a-2])(new i.Value(s[a-2])),step:s[a]});break;case 166:this.$=i.addLocationDataFn(o[a-1],o[a])(function(){return s[a].own=s[a-1].own,s[a].name=s[a-1][0],s[a].index=s[a-1][1],s[a]}());break;case 167:this.$=i.addLocationDataFn(o[a-1],o[a])(s[a]);break;case 168:this.$=i.addLocationDataFn(o[a-2],o[a])(function(){return s[a].own=!0,s[a]}());break;case 174:this.$=i.addLocationDataFn(o[a-2],o[a])([s[a-2],s[a]]);break;case 175:this.$=i.addLocationDataFn(o[a-1],o[a])({source:s[a]});break;case 176:this.$=i.addLocationDataFn(o[a-1],o[a])({source:s[a],object:!0});break;case 177:this.$=i.addLocationDataFn(o[a-3],o[a])({source:s[a-2],guard:s[a]});break;case 
178:this.$=i.addLocationDataFn(o[a-3],o[a])({source:s[a-2],guard:s[a],object:!0});break;case 179:this.$=i.addLocationDataFn(o[a-3],o[a])({source:s[a-2],step:s[a]});break;case 180:this.$=i.addLocationDataFn(o[a-5],o[a])({source:s[a-4],guard:s[a-2],step:s[a]});break;case 181:this.$=i.addLocationDataFn(o[a-5],o[a])({source:s[a-4],step:s[a-2],guard:s[a]});break;case 182:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Switch(s[a-3],s[a-1]));break;case 183:this.$=i.addLocationDataFn(o[a-6],o[a])(new i.Switch(s[a-5],s[a-3],s[a-1]));break;case 184:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.Switch(null,s[a-1]));break;case 185:this.$=i.addLocationDataFn(o[a-5],o[a])(new i.Switch(null,s[a-3],s[a-1]));break;case 187:this.$=i.addLocationDataFn(o[a-1],o[a])(s[a-1].concat(s[a]));break;case 188:this.$=i.addLocationDataFn(o[a-2],o[a])([[s[a-1],s[a]]]);break;case 189:this.$=i.addLocationDataFn(o[a-3],o[a])([[s[a-2],s[a-1]]]);break;case 190:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.If(s[a-1],s[a],{type:s[a-2]}));break;case 191:this.$=i.addLocationDataFn(o[a-4],o[a])(s[a-4].addElse(i.addLocationDataFn(o[a-2],o[a])(new i.If(s[a-1],s[a],{type:s[a-2]}))));break;case 193:this.$=i.addLocationDataFn(o[a-2],o[a])(s[a-2].addElse(s[a]));break;case 194:case 195:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.If(s[a],i.addLocationDataFn(o[a-2])(i.Block.wrap([s[a-2]])),{type:s[a-1],statement:!0}));break;case 196:case 197:case 200:case 201:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Op(s[a-1],s[a]));break;case 198:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Op("-",s[a]));break;case 199:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Op("+",s[a]));break;case 202:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Op(s[a-2].concat(s[a-1]),s[a]));break;case 203:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Op("--",s[a]));break;case 204:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Op("++",s[a]));break;case 205:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Op("--",s[a-1],null,!0));break;case 
206:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Op("++",s[a-1],null,!0));break;case 207:this.$=i.addLocationDataFn(o[a-1],o[a])(new i.Existence(s[a-1]));break;case 208:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Op("+",s[a-2],s[a]));break;case 209:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Op("-",s[a-2],s[a]));break;case 210:case 211:case 212:case 213:case 214:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Op(s[a-1],s[a-2],s[a]));break;case 215:this.$=i.addLocationDataFn(o[a-2],o[a])(function(){return"!"===s[a-1].charAt(0)?new i.Op(s[a-1].slice(1),s[a-2],s[a]).invert():new i.Op(s[a-1],s[a-2],s[a])}());break;case 216:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Assign(s[a-2],s[a],s[a-1]));break;case 217:this.$=i.addLocationDataFn(o[a-4],o[a])(new i.Assign(s[a-4],s[a-1],s[a-3]));break;case 218:this.$=i.addLocationDataFn(o[a-3],o[a])(new i.Assign(s[a-3],s[a],s[a-2]));break;case 219:this.$=i.addLocationDataFn(o[a-2],o[a])(new i.Extends(s[a-2],s[a]))}},table:[{1:[2,1],3:1,4:2,5:3,7:4,8:5,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{1:[3]},{1:[2,2],6:P},t(U,[2,3]),t(U,[2,6],{119:69,110:89,116:90,111:x,113:S,117:R,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(U,[2,7],{119:69,110:92,116:93,111:x,113:S,117:R,133:Z}),t(et,[2,11],{88:94,69:95,77:101,73:tt,74:nt,75:it,76:rt,78:st,81:ot,91:at,92:ct}),t(et,[2,12],{77:101,88:104,69:105,73:tt,74:nt,75:it,76:rt,78:st,81:ot,91:at,92:ct}),t(et,[2,13]),t(et,[2,14]),t(et,[2,15]),t(et,[2,16]),t(et,[2,17]),t(et,[2,18]),t(et,[2,19]),t(et,[2,20]),t(et,[2,21]),t(et,[2,22]),t(et,[2,8]),t(et,[2,9]),t(et,[2,10]),t(lt,ht,{46:[1,106]}),t(lt,[2,83]),t(lt,[2,84]),t
(lt,[2,85]),t(lt,[2,86]),t([1,6,25,26,34,38,56,61,64,73,74,75,76,78,80,81,85,91,93,98,100,109,111,112,113,117,118,133,136,137,142,143,144,145,146,147,148],[2,113],{89:107,92:ut}),t([6,25,56,61],pt,{55:109,62:110,63:111,27:113,51:114,65:115,66:116,28:i,64:dt,83:y,96:ft,97:mt}),{24:119,25:gt},{7:121,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:123,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:124,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:125,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:127,8:126,9:1
8,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,139:[1,128],140:B,141:V},{12:130,13:131,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:132,51:63,65:47,66:48,68:129,70:23,71:24,72:25,83:y,90:w,95:T,96:C,97:F,108:L},{12:130,13:131,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:132,51:63,65:47,66:48,68:133,70:23,71:24,72:25,83:y,90:w,95:T,96:C,97:F,108:L},t(vt,bt,{87:[1,137],140:[1,134],141:[1,135],149:[1,136]}),t(et,[2,192],{128:[1,138]}),{24:139,25:gt},{24:140,25:gt},t(et,[2,158]),{24:141,25:gt},{7:142,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:[1,143],27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(yt,[2,103],{39:22,70:23,71:24,72:25,65:47,66:48,29:49,35:51,27:62,51:63,31:72,12:130,13:131,45:132,24:144,68:146,25:gt,28:i,30:r,32:s,33:o,36:a,37:c,40:l,41:h,42:u,43:p,44:d,83:y,87:[1,145],90:w,95:T,96:C,97:F,108:L}),{7:147,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,14
1:V},t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,142,143,144,145,146,147,148],[2,53],{12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,9:18,10:19,45:21,39:22,70:23,71:24,72:25,57:28,68:36,131:37,110:39,114:40,116:41,65:47,66:48,29:49,35:51,27:62,51:63,119:69,31:72,8:122,7:148,11:n,28:i,30:r,32:s,33:o,36:a,37:c,40:l,41:h,42:u,43:p,44:d,52:f,53:m,54:g,58:v,59:b,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,115:D,126:A,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V}),t(et,[2,54]),t(vt,[2,80]),t(vt,[2,81]),t(lt,[2,32]),t(lt,[2,33]),t(lt,[2,34]),t(lt,[2,35]),t(lt,[2,36]),t(lt,[2,37]),t(lt,[2,38]),{4:149,5:3,7:4,8:5,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:[1,150],27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:151,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:kt,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,64:wt,65:47,66:48,67:156,68:36,70:23,71:24,72:25,83:y,86:k,90:w,94:153,95:T,96:C,97:F,98:Tt,101:154,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(lt,[2,119]),t(lt,[2,120],{27:158,28:i}),{25:[2,57]},{25:[2,58]},t(Ct,[2,75]),t(Ct,[2,78]),{7:159,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:6
9,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:160,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:161,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:163,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,24:162,25:gt,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{27:168,28:i,51:169,65:170,66:171,71:164,83:y,96:ft,97:F,121:165,122:[1,166],123:167},{120:172,124:[1,173],125:[1,174]},t([6,25,61,85],Ft,{31:72,84:175,47:176,48:177,50:178,10:179,29:180,27:181,51:182,28:i,30:r,32:s,33:o,53:m,96:ft}),t(Et,[2,26]),t(Et,[2,27]),t(lt,[2,30]),{12:130,13:183,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:132,51:63,65:47,66:48,68:184,70:23,71:24,72:25,83:y,90:w,95:T,96:C,97:F,108:L},t(Nt,[2,25]),t(Et,[2,28]),{4:185,5:3,7:4,8:5,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,
42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(U,[2,5],{7:4,8:5,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,9:18,10:19,45:21,39:22,70:23,71:24,72:25,57:28,68:36,131:37,110:39,114:40,116:41,65:47,66:48,29:49,35:51,27:62,51:63,119:69,31:72,5:186,11:n,28:i,30:r,32:s,33:o,36:a,37:c,40:l,41:h,42:u,43:p,44:d,52:f,53:m,54:g,58:v,59:b,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,111:x,113:S,115:D,117:R,126:A,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V}),t(et,[2,207]),{7:187,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:188,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:189,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:190,8:122,9:18,10:19,1
1:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:191,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:192,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:193,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:194,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:4
0,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:195,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(et,[2,157]),t(et,[2,162]),{7:196,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(et,[2,156]),t(et,[2,161]),{89:197,92:ut},t(Ct,[2,76]),{92:[2,116]},{27:198,28:i},{27:199,28:i},t(Ct,[2,91],{27:200,28:i}),{27:201,28:i},t(Ct,[2,92]),{7:203,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,64:Lt,65:47,66:48,68:36,70:23,71:24,72:25,79:202,82:204,83:y,86:k,90:w,95:T,96:C,97:F,99:205,100:xt,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{77:208,78:st,81:ot},{89:209,92:ut},t(Ct,[2,77]),{6:[1,211],7:210,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:[1,212],27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:
69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(St,[2,114]),{7:215,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:kt,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,64:wt,65:47,66:48,67:156,68:36,70:23,71:24,72:25,83:y,86:k,90:w,93:[1,213],94:214,95:T,96:C,97:F,101:154,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t([6,25],Dt,{60:218,56:[1,216],61:Rt}),t(At,[2,62]),t(At,[2,66],{46:[1,220],64:[1,219]}),t(At,[2,69]),t(It,[2,70]),t(It,[2,71]),t(It,[2,72]),t(It,[2,73]),{27:158,28:i},{7:215,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:kt,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,64:wt,65:47,66:48,67:156,68:36,70:23,71:24,72:25,83:y,86:k,90:w,94:153,95:T,96:C,97:F,98:Tt,101:154,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(et,[2,56]),{4:222,5:3,7:4,8:5,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,26:[1,221],27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,136,137,143,144,145,146,147,148],[2,196],{119:69,110:89,116:90,142:X}),{110:92,111:x,113:S,116:93,117:R,119:69,133:Z},t(_t,[2,197],{119:69,110:89,116:90,142:X,144:Y}),t(_t,[2,198],{119:69,110:89,116:90,142:X,144:Y}),t(_t,[2,199],{119:69,110:89,116:90,142:X,144:Y}),t(et,[2,200],{119:69,110:92,116:93}
),t(Ot,[2,201],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{7:223,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(et,[2,203],{73:bt,74:bt,75:bt,76:bt,78:bt,81:bt,91:bt,92:bt}),{69:95,73:tt,74:nt,75:it,76:rt,77:101,78:st,81:ot,88:94,91:at,92:ct},{69:105,73:tt,74:nt,75:it,76:rt,77:101,78:st,81:ot,88:104,91:at,92:ct},t($t,ht),t(et,[2,204],{73:bt,74:bt,75:bt,76:bt,78:bt,81:bt,91:bt,92:bt}),t(et,[2,205]),t(et,[2,206]),{6:[1,226],7:224,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:[1,225],27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:227,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{24:228,25:gt,132:[1,229]},t(et,[2,141],{104:230,105:[1,231],106:[1,232]}),t(et,[2,155]),t(et,[2,163]),{25:[1,233],110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},{127:234,129:235,130:jt},t(et,[2,104]),{7:237,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:
11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(yt,[2,107],{24:238,25:gt,73:bt,74:bt,75:bt,76:bt,78:bt,81:bt,91:bt,92:bt,87:[1,239]}),t(Ot,[2,148],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Ot,[2,52],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{6:P,109:[1,240]},{4:241,5:3,7:4,8:5,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t([6,25,61,98],Mt,{119:69,110:89,116:90,99:242,64:[1,243],100:xt,111:x,113:S,117:R,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Bt,[2,122]),t([6,25,98],Dt,{60:244,61:Vt}),t(Pt,[2,131]),{7:215,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:kt,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,64:wt,65:47,66:48,67:156,68:36,70:23,71:24,72:25,83:y,86:k,90:w,94:246,95:T,96:C,97:F,101:154,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(Pt,[2,137]),t(Pt,[2,138]),t(Nt,[2,121]),{24:247,25:gt,110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},t(Ut,[2,151],{119:69,110:89,116:90,111:x,112:[1,248],113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,1
46:z,147:J,148:Q}),t(Ut,[2,153],{119:69,110:89,116:90,111:x,112:[1,249],113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(et,[2,159]),t(Gt,[2,160],{119:69,110:89,116:90,111:x,113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,133,136,137,142,143,144,145,146,147,148],[2,164],{118:[1,250]}),t(Ht,[2,167]),{27:168,28:i,51:169,65:170,66:171,83:y,96:ft,97:mt,121:251,123:167},t(Ht,[2,173],{61:[1,252]}),t(qt,[2,169]),t(qt,[2,170]),t(qt,[2,171]),t(qt,[2,172]),t(et,[2,166]),{7:253,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:254,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t([6,25,85],Dt,{60:255,61:Xt}),t(Wt,[2,99]),t(Wt,[2,42],{49:[1,257]}),t(Yt,[2,50],{46:[1,258]}),t(Wt,[2,47]),t(Yt,[2,51]),t(Kt,[2,48]),t(Kt,[2,49]),{38:[1,259],69:105,73:tt,74:nt,75:it,76:rt,77:101,78:st,81:ot,88:104,91:at,92:ct},t($t,bt),{6:P,34:[1,260]},t(U,[2,4]),t(zt,[2,208],{119:69,110:89,116:90,142:X,143:W,144:Y}),t(zt,[2,209],{119:69,110:89,116:90,142:X,143:W,144:Y}),t(_t,[2,210],{119:69,110:89,116:90,142:X,144:Y}),t(_t,[2,211],{119:69,110:89,116:90,142:X,144:Y}),t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,145,146,147,148],[2,212],{119:69,110:89,116
:90,136:H,137:q,142:X,143:W,144:Y}),t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,146,147],[2,213],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,148:Q}),t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,147],[2,214],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,148:Q}),t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,118,133,146,147,148],[2,215],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K}),t(Gt,[2,195],{119:69,110:89,116:90,111:x,113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Gt,[2,194],{119:69,110:89,116:90,111:x,113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(St,[2,111]),t(Ct,[2,87]),t(Ct,[2,88]),t(Ct,[2,89]),t(Ct,[2,90]),{80:[1,261]},{64:Lt,80:[2,95],99:262,100:xt,110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},{80:[2,96]},{7:263,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,80:[2,130],83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(Jt,[2,124]),t(Jt,Qt),t(Ct,[2,94]),t(St,[2,112]),t(Ot,[2,39],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{7:264,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:265,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:1
3,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(St,[2,117]),t([6,25,93],Dt,{60:266,61:Vt}),t(Pt,Mt,{119:69,110:89,116:90,64:[1,267],111:x,113:S,117:R,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{57:268,58:v,59:b},t(Zt,en,{63:111,27:113,51:114,65:115,66:116,62:269,28:i,64:dt,83:y,96:ft,97:mt}),{6:tn,25:nn},t(At,[2,67]),{7:272,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(rn,[2,23]),{6:P,26:[1,273]},t(Ot,[2,202],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Ot,[2,216],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{7:274,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:275,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,1
07:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(Ot,[2,219],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(et,[2,193]),{7:276,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(et,[2,142],{105:[1,277]}),{24:278,25:gt},{24:281,25:gt,27:279,28:i,66:280,83:y},{127:282,129:235,130:jt},{26:[1,283],128:[1,284],129:285,130:jt},t(sn,[2,186]),{7:287,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,102:286,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(on,[2,105],{119:69,110:89,116:90,24:288,25:gt,111:x,113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(et,[2,108]),{7:289,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(lt,[2,149]),{6:P,26:[1,290]},{7:291,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:
h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t([11,28,30,32,33,36,37,40,41,42,43,44,52,53,54,58,59,83,86,90,95,96,97,103,107,108,111,113,115,117,126,132,134,135,136,137,138,140,141],Qt,{6:an,25:an,61:an,98:an}),{6:cn,25:ln,98:[1,292]},t([6,25,26,93,98],en,{12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,9:18,10:19,45:21,39:22,70:23,71:24,72:25,57:28,68:36,131:37,110:39,114:40,116:41,65:47,66:48,29:49,35:51,27:62,51:63,119:69,31:72,8:122,67:156,7:215,101:295,11:n,28:i,30:r,32:s,33:o,36:a,37:c,40:l,41:h,42:u,43:p,44:d,52:f,53:m,54:g,58:v,59:b,64:wt,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,111:x,113:S,115:D,117:R,126:A,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V}),t(Zt,Dt,{60:296,61:Vt}),t(hn,[2,190]),{7:297,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:298,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:299,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:
m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(Ht,[2,168]),{27:168,28:i,51:169,65:170,66:171,83:y,96:ft,97:mt,123:300},t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,113,117,133],[2,175],{119:69,110:89,116:90,112:[1,301],118:[1,302],136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(un,[2,176],{119:69,110:89,116:90,112:[1,303],136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{6:pn,25:dn,85:[1,304]},t([6,25,26,85],en,{31:72,48:177,50:178,10:179,29:180,27:181,51:182,47:307,28:i,30:r,32:s,33:o,53:m,96:ft}),{7:308,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:[1,309],27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:310,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:[1,311],27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(lt,[2,31]),t(Et,[2,29]),t(Ct,[2,93]),{7:312,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,80:[2,128],83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,1
34:_,135:O,136:$,137:j,138:M,140:B,141:V},{80:[2,129],110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},t(Ot,[2,40],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{26:[1,313],110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},{6:cn,25:ln,93:[1,314]},t(Pt,an),{24:315,25:gt},t(At,[2,63]),{27:113,28:i,51:114,62:316,63:111,64:dt,65:115,66:116,83:y,96:ft,97:mt},t(fn,pt,{62:110,63:111,27:113,51:114,65:115,66:116,55:317,28:i,64:dt,83:y,96:ft,97:mt}),t(At,[2,68],{119:69,110:89,116:90,111:x,113:S,117:R,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(rn,[2,24]),{26:[1,318],110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},t(Ot,[2,218],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{24:319,25:gt,110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},{24:320,25:gt},t(et,[2,143]),{24:321,25:gt},{24:322,25:gt},t(mn,[2,147]),{26:[1,323],128:[1,324],129:285,130:jt},t(et,[2,184]),{24:325,25:gt},t(sn,[2,187]),{24:326,25:gt,61:[1,327]},t(gn,[2,139],{119:69,110:89,116:90,111:x,113:S,117:R,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(et,[2,106]),t(on,[2,109],{119:69,110:89,116:90,24:328,25:gt,111:x,113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{109:[1,329]},{98:[1,330],110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},t(Bt,[2,123]),{7:215,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,64:wt,65:47,66:48,67:156,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,101:331,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_
,135:O,136:$,137:j,138:M,140:B,141:V},{7:215,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,25:kt,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,64:wt,65:47,66:48,67:156,68:36,70:23,71:24,72:25,83:y,86:k,90:w,94:332,95:T,96:C,97:F,101:154,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(Pt,[2,132]),{6:cn,25:ln,26:[1,333]},t(Gt,[2,152],{119:69,110:89,116:90,111:x,113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Gt,[2,154],{119:69,110:89,116:90,111:x,113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Gt,[2,165],{119:69,110:89,116:90,111:x,113:S,117:R,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Ht,[2,174]),{7:334,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:335,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:336,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T
,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(Bt,[2,97]),{10:179,27:181,28:i,29:180,30:r,31:72,32:s,33:o,47:337,48:177,50:178,51:182,53:m,96:ft},t(fn,Ft,{31:72,47:176,48:177,50:178,10:179,29:180,27:181,51:182,84:338,28:i,30:r,32:s,33:o,53:m,96:ft}),t(Wt,[2,100]),t(Wt,[2,43],{119:69,110:89,116:90,111:x,113:S,117:R,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{7:339,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(Wt,[2,45],{119:69,110:89,116:90,111:x,113:S,117:R,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{7:340,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{80:[2,127],110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},t(et,[2,41]),t(St,[2,118]),t(et,[2,55]),t(At,[2,64]),t(Zt,Dt,{60:341,61:Rt}),t(et,[2,217]),t(hn,[2,191]),t(et,[2,144]),t(mn,[2,145]),t(mn,[2,146]),t(et,[2,182]),{24:342,25:gt},{26:[1,343]},t(sn,[2,188],{6:[1,344]}),{7:345,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,
68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},t(et,[2,110]),t(lt,[2,150]),t(lt,[2,126]),t(Pt,[2,133]),t(Zt,Dt,{60:346,61:Vt}),t(Pt,[2,134]),t([1,6,25,26,34,56,61,64,80,85,93,98,100,109,111,112,113,117,133],[2,177],{119:69,110:89,116:90,118:[1,347],136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(un,[2,179],{119:69,110:89,116:90,112:[1,348],136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Ot,[2,178],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Wt,[2,101]),t(Zt,Dt,{60:349,61:Xt}),{26:[1,350],110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},{26:[1,351],110:89,111:x,113:S,116:90,117:R,119:69,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q},{6:tn,25:nn,26:[1,352]},{26:[1,353]},t(et,[2,185]),t(sn,[2,189]),t(gn,[2,140],{119:69,110:89,116:90,111:x,113:S,117:R,133:G,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),{6:cn,25:ln,26:[1,354]},{7:355,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{7:356,8:122,9:18,10:19,11:n,12:6,13:7,14:8,15:9,16:10,17:11,18:12,19:13,20:14,21:15,22:16,23:17,27:62,28:i,29:49,30:r,31:72,32:s,33:o,35:51,36:a,37:c,39:22,40:l,41:h,42:u,43:p,44:d,45:21,51:63,52:f,53:m,54:g,57:28,58:v,59:b,65:47,66:48,68:36,70:23,71:24,72:25,83:y,86:k,90:w,95:T,96:C,97:F,103:E,107:N,108:L,110:39,111:x,113:S,114:40,115:D,116:41,117:R,119:69,126:A,131:37,132:I,134:_,135:O,136:$,137:j,138:M,140:B,141:V},{6:pn,25:dn,26:[1,357]},t(Wt,[2,44]),t(Wt,[2,46]),
t(At,[2,65]),t(et,[2,183]),t(Pt,[2,135]),t(Ot,[2,180],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Ot,[2,181],{119:69,110:89,116:90,136:H,137:q,142:X,143:W,144:Y,145:K,146:z,147:J,148:Q}),t(Wt,[2,102])],defaultActions:{60:[2,57],61:[2,58],96:[2,116],204:[2,96]},parseError:function(e,t){if(!t.recoverable)throw Error(e); -this.trace(e)},parse:function(e){function t(){var e;return e=f.lex()||p,"number"!=typeof e&&(e=n.symbols_[e]||e),e}var n=this,i=[0],r=[null],s=[],o=this.table,a="",c=0,l=0,h=0,u=2,p=1,d=s.slice.call(arguments,1),f=Object.create(this.lexer),m={yy:{}};for(var g in this.yy)Object.prototype.hasOwnProperty.call(this.yy,g)&&(m.yy[g]=this.yy[g]);f.setInput(e,m.yy),m.yy.lexer=f,m.yy.parser=this,f.yylloc===void 0&&(f.yylloc={});var v=f.yylloc;s.push(v);var b=f.options&&f.options.ranges;this.parseError="function"==typeof m.yy.parseError?m.yy.parseError:Object.getPrototypeOf(this).parseError;for(var y,k,w,T,C,F,E,N,L,x={};;){if(w=i[i.length-1],this.defaultActions[w]?T=this.defaultActions[w]:((null===y||y===void 0)&&(y=t()),T=o[w]&&o[w][y]),T===void 0||!T.length||!T[0]){var S="";L=[];for(F in o[w])this.terminals_[F]&&F>u&&L.push("'"+this.terminals_[F]+"'");S=f.showPosition?"Parse error on line "+(c+1)+":\n"+f.showPosition()+"\nExpecting "+L.join(", ")+", got '"+(this.terminals_[y]||y)+"'":"Parse error on line "+(c+1)+": Unexpected "+(y==p?"end of input":"'"+(this.terminals_[y]||y)+"'"),this.parseError(S,{text:f.match,token:this.terminals_[y]||y,line:f.yylineno,loc:v,expected:L})}if(T[0]instanceof Array&&T.length>1)throw Error("Parse Error: multiple actions possible at state: "+w+", token: "+y);switch(T[0]){case 1:i.push(y),r.push(f.yytext),s.push(f.yylloc),i.push(T[1]),y=null,k?(y=k,k=null):(l=f.yyleng,a=f.yytext,c=f.yylineno,v=f.yylloc,h>0&&h--);break;case 
2:if(E=this.productions_[T[1]][1],x.$=r[r.length-E],x._$={first_line:s[s.length-(E||1)].first_line,last_line:s[s.length-1].last_line,first_column:s[s.length-(E||1)].first_column,last_column:s[s.length-1].last_column},b&&(x._$.range=[s[s.length-(E||1)].range[0],s[s.length-1].range[1]]),C=this.performAction.apply(x,[a,l,c,m.yy,T[1],r,s].concat(d)),C!==void 0)return C;E&&(i=i.slice(0,2*-1*E),r=r.slice(0,-1*E),s=s.slice(0,-1*E)),i.push(this.productions_[T[1]][0]),r.push(x.$),s.push(x._$),N=o[i[i.length-2]][i[i.length-1]],i.push(N);break;case 3:return!0}}return!0}};return e.prototype=vn,vn.Parser=e,new e}();return require!==void 0&&e!==void 0&&(e.parser=n,e.Parser=n.Parser,e.parse=function(){return n.parse.apply(n,arguments)},e.main=function(t){t[1]||(console.log("Usage: "+t[0]+" FILE"),process.exit(1));var n=require("fs").readFileSync(require("path").normalize(t[1]),"utf8");return e.parser.parse(n)},t!==void 0&&require.main===t&&e.main(process.argv.slice(1))),t.exports}(),require["./scope"]=function(){var e={},t={exports:e};return function(){var t,n=[].indexOf||function(e){for(var t=0,n=this.length;n>t;t++)if(t in this&&this[t]===e)return t;return-1};e.Scope=t=function(){function e(e,t,n,i){var r,s;this.parent=e,this.expressions=t,this.method=n,this.referencedVars=i,this.variables=[{name:"arguments",type:"arguments"}],this.positions={},this.parent||(this.utilities={}),this.root=null!=(r=null!=(s=this.parent)?s.root:void 0)?r:this}return e.prototype.add=function(e,t,n){return this.shared&&!n?this.parent.add(e,t,n):Object.prototype.hasOwnProperty.call(this.positions,e)?this.variables[this.positions[e]].type=t:this.positions[e]=this.variables.push({name:e,type:t})-1},e.prototype.namedMethod=function(){var e;return(null!=(e=this.method)?e.name:void 0)||!this.parent?this.method:this.parent.namedMethod()},e.prototype.find=function(e){return this.check(e)?!0:(this.add(e,"var"),!1)},e.prototype.parameter=function(e){return this.shared&&this.parent.check(e,!0)?void 
0:this.add(e,"param")},e.prototype.check=function(e){var t;return!!(this.type(e)||(null!=(t=this.parent)?t.check(e):void 0))},e.prototype.temporary=function(e,t,n){return null==n&&(n=!1),n?(t+parseInt(e,36)).toString(36).replace(/\d/g,"a"):e+(t||"")},e.prototype.type=function(e){var t,n,i,r;for(i=this.variables,t=0,n=i.length;n>t;t++)if(r=i[t],r.name===e)return r.type;return null},e.prototype.freeVariable=function(e,t){var i,r,s;for(null==t&&(t={}),i=0;;){if(s=this.temporary(e,i,t.single),!(this.check(s)||n.call(this.root.referencedVars,s)>=0))break;i++}return(null!=(r=t.reserve)?r:!0)&&this.add(s,"var",!0),s},e.prototype.assign=function(e,t){return this.add(e,{value:t,assigned:!0},!0),this.hasAssignments=!0},e.prototype.hasDeclarations=function(){return!!this.declaredVariables().length},e.prototype.declaredVariables=function(){var e;return function(){var t,n,i,r;for(i=this.variables,r=[],t=0,n=i.length;n>t;t++)e=i[t],"var"===e.type&&r.push(e.name);return r}.call(this).sort()},e.prototype.assignedVariables=function(){var e,t,n,i,r;for(n=this.variables,i=[],e=0,t=n.length;t>e;e++)r=n[e],r.type.assigned&&i.push(r.name+" = "+r.type.value);return i},e}()}.call(this),t.exports}(),require["./nodes"]=function(){var e={},t={exports:e};return function(){var t,n,i,r,s,o,a,c,l,h,u,p,d,f,m,g,v,b,y,k,w,T,C,F,E,N,L,x,S,D,R,A,I,_,O,$,j,M,B,V,P,U,G,H,q,X,W,Y,K,z,J,Q,Z,et,tt,nt,it,rt,st,ot,at,ct,lt,ht,ut,pt,dt,ft,mt,gt,vt,bt,yt,kt=function(e,t){function n(){this.constructor=e}for(var i in t)wt.call(t,i)&&(e[i]=t[i]);return n.prototype=t.prototype,e.prototype=new n,e.__super__=t.prototype,e},wt={}.hasOwnProperty,Tt=[].indexOf||function(e){for(var t=0,n=this.length;n>t;t++)if(t in this&&this[t]===e)return 
t;return-1},Ct=[].slice;Error.stackTraceLimit=1/0,P=require("./scope").Scope,dt=require("./lexer"),$=dt.RESERVED,V=dt.STRICT_PROSCRIBED,ft=require("./helpers"),et=ft.compact,rt=ft.flatten,it=ft.extend,ht=ft.merge,tt=ft.del,gt=ft.starts,nt=ft.ends,mt=ft.some,Z=ft.addLocationDataFn,lt=ft.locationDataToString,vt=ft.throwSyntaxError,e.extend=it,e.addLocationDataFn=Z,Q=function(){return!0},D=function(){return!1},X=function(){return this},S=function(){return this.negated=!this.negated,this},e.CodeFragment=l=function(){function e(e,t){var n;this.code=""+t,this.locationData=null!=e?e.locationData:void 0,this.type=(null!=e?null!=(n=e.constructor)?n.name:void 0:void 0)||"unknown"}return e.prototype.toString=function(){return""+this.code+(this.locationData?": "+lt(this.locationData):"")},e}(),st=function(e){var t;return function(){var n,i,r;for(r=[],n=0,i=e.length;i>n;n++)t=e[n],r.push(t.code);return r}().join("")},e.Base=r=function(){function e(){}return e.prototype.compile=function(e,t){return st(this.compileToFragments(e,t))},e.prototype.compileToFragments=function(e,t){var n;return e=it({},e),t&&(e.level=t),n=this.unfoldSoak(e)||this,n.tab=e.indent,e.level!==L&&n.isStatement(e)?n.compileClosure(e):n.compileNode(e)},e.prototype.compileClosure=function(e){var n,i,r,a,l,h,u;return(a=this.jumps())&&a.error("cannot use a pure statement in an expression"),e.sharedScope=!0,r=new c([],s.wrap([this])),n=[],((i=this.contains(at))||this.contains(ct))&&(n=[new x("this")],i?(l="apply",n.push(new x("arguments"))):l="call",r=new z(r,[new t(new x(l))])),h=new o(r,n).compileNode(e),(r.isGenerator||(null!=(u=r.base)?u.isGenerator:void 0))&&(h.unshift(this.makeCode("(yield* ")),h.push(this.makeCode(")"))),h},e.prototype.cache=function(e,t,n){var r,s,o;return r=null!=n?n(this):this.isComplex(),r?(s=new x(e.scope.freeVariable("ref")),o=new 
i(s,this),t?[o.compileToFragments(e,t),[this.makeCode(s.value)]]:[o,s]):(s=t?this.compileToFragments(e,t):this,[s,s])},e.prototype.cacheToCodeFragments=function(e){return[st(e[0]),st(e[1])]},e.prototype.makeReturn=function(e){var t;return t=this.unwrapAll(),e?new o(new x(e+".push"),[t]):new M(t)},e.prototype.contains=function(e){var t;return t=void 0,this.traverseChildren(!1,function(n){return e(n)?(t=n,!1):void 0}),t},e.prototype.lastNonComment=function(e){var t;for(t=e.length;t--;)if(!(e[t]instanceof h))return e[t];return null},e.prototype.toString=function(e,t){var n;return null==e&&(e=""),null==t&&(t=this.constructor.name),n="\n"+e+t,this.soak&&(n+="?"),this.eachChild(function(t){return n+=t.toString(e+q)}),n},e.prototype.eachChild=function(e){var t,n,i,r,s,o,a,c;if(!this.children)return this;for(a=this.children,i=0,s=a.length;s>i;i++)if(t=a[i],this[t])for(c=rt([this[t]]),r=0,o=c.length;o>r;r++)if(n=c[r],e(n)===!1)return this;return this},e.prototype.traverseChildren=function(e,t){return this.eachChild(function(n){var i;return i=t(n),i!==!1?n.traverseChildren(e,t):void 0})},e.prototype.invert=function(){return new I("!",this)},e.prototype.unwrapAll=function(){var e;for(e=this;e!==(e=e.unwrap()););return e},e.prototype.children=[],e.prototype.isStatement=D,e.prototype.jumps=D,e.prototype.isComplex=Q,e.prototype.isChainable=D,e.prototype.isAssignable=D,e.prototype.unwrap=X,e.prototype.unfoldSoak=D,e.prototype.assigns=D,e.prototype.updateLocationDataIfMissing=function(e){return this.locationData?this:(this.locationData=e,this.eachChild(function(t){return t.updateLocationDataIfMissing(e)}))},e.prototype.error=function(e){return vt(e,this.locationData)},e.prototype.makeCode=function(e){return new l(this,e)},e.prototype.wrapInBraces=function(e){return[].concat(this.makeCode("("),e,this.makeCode(")"))},e.prototype.joinFragmentArrays=function(e,t){var n,i,r,s,o;for(n=[],r=s=0,o=e.length;o>s;r=++s)i=e[r],r&&n.push(this.makeCode(t)),n=n.concat(i);return 
n},e}(),e.Block=s=function(e){function t(e){this.expressions=et(rt(e||[]))}return kt(t,e),t.prototype.children=["expressions"],t.prototype.push=function(e){return this.expressions.push(e),this},t.prototype.pop=function(){return this.expressions.pop()},t.prototype.unshift=function(e){return this.expressions.unshift(e),this},t.prototype.unwrap=function(){return 1===this.expressions.length?this.expressions[0]:this},t.prototype.isEmpty=function(){return!this.expressions.length},t.prototype.isStatement=function(e){var t,n,i,r;for(r=this.expressions,n=0,i=r.length;i>n;n++)if(t=r[n],t.isStatement(e))return!0;return!1},t.prototype.jumps=function(e){var t,n,i,r,s;for(s=this.expressions,n=0,r=s.length;r>n;n++)if(t=s[n],i=t.jumps(e))return i},t.prototype.makeReturn=function(e){var t,n;for(n=this.expressions.length;n--;)if(t=this.expressions[n],!(t instanceof h)){this.expressions[n]=t.makeReturn(e),t instanceof M&&!t.expression&&this.expressions.splice(n,1);break}return this},t.prototype.compileToFragments=function(e,n){return null==e&&(e={}),e.scope?t.__super__.compileToFragments.call(this,e,n):this.compileRoot(e)},t.prototype.compileNode=function(e){var n,i,r,s,o,a,c,l,h;for(this.tab=e.indent,h=e.level===L,i=[],l=this.expressions,s=o=0,a=l.length;a>o;s=++o)c=l[s],c=c.unwrapAll(),c=c.unfoldSoak(e)||c,c instanceof t?i.push(c.compileNode(e)):h?(c.front=!0,r=c.compileToFragments(e),c.isStatement(e)||(r.unshift(this.makeCode(""+this.tab)),r.push(this.makeCode(";"))),i.push(r)):i.push(c.compileToFragments(e,F));return h?this.spaced?[].concat(this.joinFragmentArrays(i,"\n\n"),this.makeCode("\n")):this.joinFragmentArrays(i,"\n"):(n=i.length?this.joinFragmentArrays(i,", "):[this.makeCode("void 0")],i.length>1&&e.level>=F?this.wrapInBraces(n):n)},t.prototype.compileRoot=function(e){var t,n,i,r,s,o,a,c,l,u,p;for(e.indent=e.bare?"":q,e.level=L,this.spaced=!0,e.scope=new 
P(null,this,null,null!=(l=e.referencedVars)?l:[]),u=e.locals||[],r=0,s=u.length;s>r;r++)o=u[r],e.scope.parameter(o);return a=[],e.bare||(c=function(){var e,n,r,s;for(r=this.expressions,s=[],i=e=0,n=r.length;n>e&&(t=r[i],t.unwrap()instanceof h);i=++e)s.push(t);return s}.call(this),p=this.expressions.slice(c.length),this.expressions=c,c.length&&(a=this.compileNode(ht(e,{indent:""})),a.push(this.makeCode("\n"))),this.expressions=p),n=this.compileWithDeclarations(e),e.bare?n:[].concat(a,this.makeCode("(function() {\n"),n,this.makeCode("\n}).call(this);\n"))},t.prototype.compileWithDeclarations=function(e){var t,n,i,r,s,o,a,c,l,u,p,d,f,m;for(r=[],c=[],l=this.expressions,s=o=0,a=l.length;a>o&&(i=l[s],i=i.unwrap(),i instanceof h||i instanceof x);s=++o);return e=ht(e,{level:L}),s&&(d=this.expressions.splice(s,9e9),u=[this.spaced,!1],m=u[0],this.spaced=u[1],p=[this.compileNode(e),m],r=p[0],this.spaced=p[1],this.expressions=d),c=this.compileNode(e),f=e.scope,f.expressions===this&&(n=e.scope.hasDeclarations(),t=f.hasAssignments,n||t?(s&&r.push(this.makeCode("\n")),r.push(this.makeCode(this.tab+"var ")),n&&r.push(this.makeCode(f.declaredVariables().join(", "))),t&&(n&&r.push(this.makeCode(",\n"+(this.tab+q))),r.push(this.makeCode(f.assignedVariables().join(",\n"+(this.tab+q))))),r.push(this.makeCode(";\n"+(this.spaced?"\n":"")))):r.length&&c.length&&r.push(this.makeCode("\n"))),r.concat(c)},t.wrap=function(e){return 1===e.length&&e[0]instanceof t?e[0]:new t(e)},t}(r),e.Literal=x=function(e){function t(e){this.value=e}return kt(t,e),t.prototype.makeReturn=function(){return this.isStatement()?this:t.__super__.makeReturn.apply(this,arguments)},t.prototype.isAssignable=function(){return g.test(this.value)},t.prototype.isStatement=function(){var e;return"break"===(e=this.value)||"continue"===e||"debugger"===e},t.prototype.isComplex=D,t.prototype.assigns=function(e){return e===this.value},t.prototype.jumps=function(e){return"break"!==this.value||(null!=e?e.loop:void 
0)||(null!=e?e.block:void 0)?"continue"!==this.value||(null!=e?e.loop:void 0)?void 0:this:this},t.prototype.compileNode=function(e){var t,n,i;return n="this"===this.value?(null!=(i=e.scope.method)?i.bound:void 0)?e.scope.method.context:this.value:this.value.reserved?'"'+this.value+'"':this.value,t=this.isStatement()?""+this.tab+n+";":n,[this.makeCode(t)]},t.prototype.toString=function(){return' "'+this.value+'"'},t}(r),e.Undefined=function(e){function t(){return t.__super__.constructor.apply(this,arguments)}return kt(t,e),t.prototype.isAssignable=D,t.prototype.isComplex=D,t.prototype.compileNode=function(e){return[this.makeCode(e.level>=T?"(void 0)":"void 0")]},t}(r),e.Null=function(e){function t(){return t.__super__.constructor.apply(this,arguments)}return kt(t,e),t.prototype.isAssignable=D,t.prototype.isComplex=D,t.prototype.compileNode=function(){return[this.makeCode("null")]},t}(r),e.Bool=function(e){function t(e){this.val=e}return kt(t,e),t.prototype.isAssignable=D,t.prototype.isComplex=D,t.prototype.compileNode=function(){return[this.makeCode(this.val)]},t}(r),e.Return=M=function(e){function t(e){this.expression=e}return kt(t,e),t.prototype.children=["expression"],t.prototype.isStatement=Q,t.prototype.makeReturn=X,t.prototype.jumps=X,t.prototype.compileToFragments=function(e,n){var i,r;return i=null!=(r=this.expression)?r.makeReturn():void 0,!i||i instanceof t?t.__super__.compileToFragments.call(this,e,n):i.compileToFragments(e,n)},t.prototype.compileNode=function(e){var t,n,i;return t=[],n=null!=(i=this.expression)?"function"==typeof i.isYieldReturn?i.isYieldReturn():void 0:void 0,n||t.push(this.makeCode(this.tab+("return"+(this.expression?" 
":"")))),this.expression&&(t=t.concat(this.expression.compileToFragments(e,N))),n||t.push(this.makeCode(";")),t},t}(r),e.Value=z=function(e){function t(e,n,i){return!n&&e instanceof t?e:(this.base=e,this.properties=n||[],i&&(this[i]=!0),this)}return kt(t,e),t.prototype.children=["base","properties"],t.prototype.add=function(e){return this.properties=this.properties.concat(e),this},t.prototype.hasProperties=function(){return!!this.properties.length},t.prototype.bareLiteral=function(e){return!this.properties.length&&this.base instanceof e},t.prototype.isArray=function(){return this.bareLiteral(n)},t.prototype.isRange=function(){return this.bareLiteral(j)},t.prototype.isComplex=function(){return this.hasProperties()||this.base.isComplex()},t.prototype.isAssignable=function(){return this.hasProperties()||this.base.isAssignable()},t.prototype.isSimpleNumber=function(){return this.bareLiteral(x)&&B.test(this.base.value)},t.prototype.isString=function(){return this.bareLiteral(x)&&b.test(this.base.value)},t.prototype.isRegex=function(){return this.bareLiteral(x)&&v.test(this.base.value)},t.prototype.isAtomic=function(){var e,t,n,i;for(i=this.properties.concat(this.base),e=0,t=i.length;t>e;e++)if(n=i[e],n.soak||n instanceof o)return!1;return!0},t.prototype.isNotCallable=function(){return this.isSimpleNumber()||this.isString()||this.isRegex()||this.isArray()||this.isRange()||this.isSplice()||this.isObject()},t.prototype.isStatement=function(e){return!this.properties.length&&this.base.isStatement(e)},t.prototype.assigns=function(e){return!this.properties.length&&this.base.assigns(e)},t.prototype.jumps=function(e){return!this.properties.length&&this.base.jumps(e)},t.prototype.isObject=function(e){return this.properties.length?!1:this.base instanceof A&&(!e||this.base.generated)},t.prototype.isSplice=function(){var e,t;return t=this.properties,e=t[t.length-1],e instanceof U},t.prototype.looksStatic=function(e){var t;return 
this.base.value===e&&1===this.properties.length&&"prototype"!==(null!=(t=this.properties[0].name)?t.value:void 0)},t.prototype.unwrap=function(){return this.properties.length?this:this.base},t.prototype.cacheReference=function(e){var n,r,s,o,a;return a=this.properties,s=a[a.length-1],2>this.properties.length&&!this.base.isComplex()&&!(null!=s?s.isComplex():void 0)?[this,this]:(n=new t(this.base,this.properties.slice(0,-1)),n.isComplex()&&(r=new x(e.scope.freeVariable("base")),n=new t(new O(new i(r,n)))),s?(s.isComplex()&&(o=new x(e.scope.freeVariable("name")),s=new w(new i(o,s.index)),o=new w(o)),[n.add(s),new t(r||n.base,[o||s])]):[n,r])},t.prototype.compileNode=function(e){var t,n,i,r,s;for(this.base.front=this.front,s=this.properties,t=this.base.compileToFragments(e,s.length?T:null),(this.base instanceof O||s.length)&&B.test(st(t))&&t.push(this.makeCode(".")),n=0,i=s.length;i>n;n++)r=s[n],t.push.apply(t,r.compileToFragments(e));return t},t.prototype.unfoldSoak=function(e){return null!=this.unfoldedSoak?this.unfoldedSoak:this.unfoldedSoak=function(n){return function(){var r,s,o,a,c,l,h,p,d,f;if(o=n.base.unfoldSoak(e))return(p=o.body.properties).push.apply(p,n.properties),o;for(d=n.properties,s=a=0,c=d.length;c>a;s=++a)if(l=d[s],l.soak)return l.soak=!1,r=new t(n.base,n.properties.slice(0,s)),f=new t(n.base,n.properties.slice(s)),r.isComplex()&&(h=new x(e.scope.freeVariable("ref")),r=new O(new i(h,r)),f.base=h),new y(new u(r),f,{soak:!0});return!1}}(this)()},t}(r),e.Comment=h=function(e){function t(e){this.comment=e}return kt(t,e),t.prototype.isStatement=Q,t.prototype.makeReturn=X,t.prototype.compileNode=function(e,t){var n,i;return i=this.comment.replace(/^(\s*)#(?=\s)/gm,"$1 *"),n="/*"+ut(i,this.tab)+(Tt.call(i,"\n")>=0?"\n"+this.tab:"")+" */",(t||e.level)===L&&(n=e.indent+n),[this.makeCode("\n"),this.makeCode(n)]},t}(r),e.Call=o=function(e){function 
n(e,t,n){this.args=null!=t?t:[],this.soak=n,this.isNew=!1,this.isSuper="super"===e,this.variable=this.isSuper?null:e,e instanceof z&&e.isNotCallable()&&e.error("literal is not a function")}return kt(n,e),n.prototype.children=["variable","args"],n.prototype.newInstance=function(){var e,t;return e=(null!=(t=this.variable)?t.base:void 0)||this.variable,e instanceof n&&!e.isNew?e.newInstance():this.isNew=!0,this},n.prototype.superReference=function(e){var n,r,s,o,a,c,l,h;return a=e.scope.namedMethod(),(null!=a?a.klass:void 0)?(o=a.klass,c=a.name,h=a.variable,o.isComplex()&&(s=new x(e.scope.parent.freeVariable("base")),r=new z(new O(new i(s,o))),h.base=r,h.properties.splice(0,o.properties.length)),(c.isComplex()||c instanceof w&&c.index.isAssignable())&&(l=new x(e.scope.parent.freeVariable("name")),c=new w(new i(l,c.index)),h.properties.pop(),h.properties.push(c)),n=[new t(new x("__super__"))],a["static"]&&n.push(new t(new x("constructor"))),n.push(null!=l?new w(l):c),new z(null!=s?s:o,n).compile(e)):(null!=a?a.ctor:void 0)?a.name+".__super__.constructor":this.error("cannot call super outside of an instance method.")},n.prototype.superThis=function(e){var t;return t=e.scope.method,t&&!t.klass&&t.context||"this"},n.prototype.unfoldSoak=function(e){var t,i,r,s,o,a,c,l,h;if(this.soak){if(this.variable){if(i=bt(e,this,"variable"))return i;c=new z(this.variable).cacheReference(e),s=c[0],h=c[1]}else s=new x(this.superReference(e)),h=new z(s);return h=new n(h,this.args),h.isNew=this.isNew,s=new x("typeof "+s.compile(e)+' === "function"'),new y(s,new z(h),{soak:!0})}for(t=this,a=[];;)if(t.variable instanceof n)a.push(t),t=t.variable;else{if(!(t.variable instanceof z))break;if(a.push(t),!((t=t.variable.base)instanceof n))break}for(l=a.reverse(),r=0,o=l.length;o>r;r++)t=l[r],i&&(t.variable instanceof n?t.variable=i:t.variable.base=i),i=bt(e,t,"variable");return i},n.prototype.compileNode=function(e){var 
t,n,i,r,s,o,a,c,l,h;if(null!=(l=this.variable)&&(l.front=this.front),r=G.compileSplattedArray(e,this.args,!0),r.length)return this.compileSplat(e,r);for(i=[],h=this.args,n=o=0,a=h.length;a>o;n=++o)t=h[n],n&&i.push(this.makeCode(", ")),i.push.apply(i,t.compileToFragments(e,F));return s=[],this.isSuper?(c=this.superReference(e)+(".call("+this.superThis(e)),i.length&&(c+=", "),s.push(this.makeCode(c))):(this.isNew&&s.push(this.makeCode("new ")),s.push.apply(s,this.variable.compileToFragments(e,T)),s.push(this.makeCode("("))),s.push.apply(s,i),s.push(this.makeCode(")")),s},n.prototype.compileSplat=function(e,t){var n,i,r,s,o,a;return this.isSuper?[].concat(this.makeCode(this.superReference(e)+".apply("+this.superThis(e)+", "),t,this.makeCode(")")):this.isNew?(s=this.tab+q,[].concat(this.makeCode("(function(func, args, ctor) {\n"+s+"ctor.prototype = func.prototype;\n"+s+"var child = new ctor, result = func.apply(child, args);\n"+s+"return Object(result) === result ? result : child;\n"+this.tab+"})("),this.variable.compileToFragments(e,F),this.makeCode(", "),t,this.makeCode(", function(){})"))):(n=[],i=new z(this.variable),(o=i.properties.pop())&&i.isComplex()?(a=e.scope.freeVariable("ref"),n=n.concat(this.makeCode("("+a+" = "),i.compileToFragments(e,F),this.makeCode(")"),o.compileToFragments(e))):(r=i.compileToFragments(e,T),B.test(st(r))&&(r=this.wrapInBraces(r)),o?(a=st(r),r.push.apply(r,o.compileToFragments(e))):a="null",n=n.concat(r)),n=n.concat(this.makeCode(".apply("+a+", "),t,this.makeCode(")")))},n}(r),e.Extends=d=function(e){function t(e,t){this.child=e,this.parent=t}return kt(t,e),t.prototype.children=["child","parent"],t.prototype.compileToFragments=function(e){return new o(new z(new x(yt("extend",e))),[this.child,this.parent]).compileToFragments(e)},t}(r),e.Access=t=function(e){function t(e,t){this.name=e,this.name.asKey=!0,this.soak="soak"===t}return kt(t,e),t.prototype.children=["name"],t.prototype.compileToFragments=function(e){var t;return 
t=this.name.compileToFragments(e),g.test(st(t))?t.unshift(this.makeCode(".")):(t.unshift(this.makeCode("[")),t.push(this.makeCode("]"))),t},t.prototype.isComplex=D,t}(r),e.Index=w=function(e){function t(e){this.index=e}return kt(t,e),t.prototype.children=["index"],t.prototype.compileToFragments=function(e){return[].concat(this.makeCode("["),this.index.compileToFragments(e,N),this.makeCode("]"))},t.prototype.isComplex=function(){return this.index.isComplex()},t}(r),e.Range=j=function(e){function t(e,t,n){this.from=e,this.to=t,this.exclusive="exclusive"===n,this.equals=this.exclusive?"":"="}return kt(t,e),t.prototype.children=["from","to"],t.prototype.compileVariables=function(e){var t,n,i,r,s,o;return e=ht(e,{top:!0}),t=tt(e,"isComplex"),n=this.cacheToCodeFragments(this.from.cache(e,F,t)),this.fromC=n[0],this.fromVar=n[1],i=this.cacheToCodeFragments(this.to.cache(e,F,t)),this.toC=i[0],this.toVar=i[1],(o=tt(e,"step"))&&(r=this.cacheToCodeFragments(o.cache(e,F,t)),this.step=r[0],this.stepVar=r[1]),s=[this.fromVar.match(R),this.toVar.match(R)],this.fromNum=s[0],this.toNum=s[1],this.stepVar?this.stepNum=this.stepVar.match(R):void 0},t.prototype.compileNode=function(e){var t,n,i,r,s,o,a,c,l,h,u,p,d,f;return this.fromVar||this.compileVariables(e),e.index?(a=this.fromNum&&this.toNum,s=tt(e,"index"),o=tt(e,"name"),l=o&&o!==s,f=s+" = "+this.fromC,this.toC!==this.toVar&&(f+=", "+this.toC),this.step!==this.stepVar&&(f+=", "+this.step),h=[s+" <"+this.equals,s+" >"+this.equals],c=h[0],r=h[1],n=this.stepNum?pt(this.stepNum[0])>0?c+" "+this.toVar:r+" "+this.toVar:a?(u=[pt(this.fromNum[0]),pt(this.toNum[0])],i=u[0],d=u[1],u,d>=i?c+" "+d:r+" "+d):(t=this.stepVar?this.stepVar+" > 0":this.fromVar+" <= "+this.toVar,t+" ? "+c+" "+this.toVar+" : "+r+" "+this.toVar),p=this.stepVar?s+" += "+this.stepVar:a?l?d>=i?"++"+s:"--"+s:d>=i?s+"++":s+"--":l?t+" ? ++"+s+" : --"+s:t+" ? 
"+s+"++ : "+s+"--",l&&(f=o+" = "+f),l&&(p=o+" = "+p),[this.makeCode(f+"; "+n+"; "+p)]):this.compileArray(e)},t.prototype.compileArray=function(e){var t,n,i,r,s,o,a,c,l,h,u,p,d;return this.fromNum&&this.toNum&&20>=Math.abs(this.fromNum-this.toNum)?(l=function(){p=[];for(var e=h=+this.fromNum,t=+this.toNum;t>=h?t>=e:e>=t;t>=h?e++:e--)p.push(e);return p}.apply(this),this.exclusive&&l.pop(),[this.makeCode("["+l.join(", ")+"]")]):(o=this.tab+q,s=e.scope.freeVariable("i",{single:!0}),u=e.scope.freeVariable("results"),c="\n"+o+u+" = [];",this.fromNum&&this.toNum?(e.index=s,n=st(this.compileNode(e))):(d=s+" = "+this.fromC+(this.toC!==this.toVar?", "+this.toC:""),i=this.fromVar+" <= "+this.toVar,n="var "+d+"; "+i+" ? "+s+" <"+this.equals+" "+this.toVar+" : "+s+" >"+this.equals+" "+this.toVar+"; "+i+" ? "+s+"++ : "+s+"--"),a="{ "+u+".push("+s+"); }\n"+o+"return "+u+";\n"+e.indent,r=function(e){return null!=e?e.contains(at):void 0},(r(this.from)||r(this.to))&&(t=", arguments"),[this.makeCode("(function() {"+c+"\n"+o+"for ("+n+")"+a+"}).apply(this"+(null!=t?t:"")+")")])},t}(r),e.Slice=U=function(e){function t(e){this.range=e,t.__super__.constructor.call(this)}return kt(t,e),t.prototype.children=["range"],t.prototype.compileNode=function(e){var t,n,i,r,s,o,a;return s=this.range,o=s.to,i=s.from,r=i&&i.compileToFragments(e,N)||[this.makeCode("0")],o&&(t=o.compileToFragments(e,N),n=st(t),(this.range.exclusive||-1!==+n)&&(a=", "+(this.range.exclusive?n:B.test(n)?""+(+n+1):(t=o.compileToFragments(e,T),"+"+st(t)+" + 1 || 9e9")))),[this.makeCode(".slice("+st(r)+(a||"")+")")]},t}(r),e.Obj=A=function(e){function n(e,t){this.generated=null!=t?t:!1,this.objects=this.properties=e||[]}return kt(n,e),n.prototype.children=["properties"],n.prototype.compileNode=function(e){var n,r,s,o,a,c,l,u,p,d,f,m,g,v,b,y,k,w,T,C,F;if(T=this.properties,this.generated)for(l=0,g=T.length;g>l;l++)y=T[l],y instanceof z&&y.error("cannot have an implicit value in an implicit 
object");for(r=p=0,v=T.length;v>p&&(w=T[r],!((w.variable||w).base instanceof O));r=++p);for(s=T.length>r,a=e.indent+=q,m=this.lastNonComment(this.properties),n=[],s&&(k=e.scope.freeVariable("obj"),n.push(this.makeCode("(\n"+a+k+" = "))),n.push(this.makeCode("{"+(0===T.length||0===r?"}":"\n"))),o=f=0,b=T.length;b>f;o=++f)w=T[o],o===r&&(0!==o&&n.push(this.makeCode("\n"+a+"}")),n.push(this.makeCode(",\n"))),u=o===T.length-1||o===r-1?"":w===m||w instanceof h?"\n":",\n",c=w instanceof h?"":a,s&&r>o&&(c+=q),w instanceof i&&("object"!==w.context&&w.operatorToken.error("unexpected "+w.operatorToken.value),w.variable instanceof z&&w.variable.hasProperties()&&w.variable.error("invalid object key")),w instanceof z&&w["this"]&&(w=new i(w.properties[0].name,w,"object")),w instanceof h||(r>o?(w instanceof i||(w=new i(w,w,"object")),(w.variable.base||w.variable).asKey=!0):(w instanceof i?(d=w.variable,F=w.value):(C=w.base.cache(e),d=C[0],F=C[1]),w=new i(new z(new x(k),[new t(d)]),F))),c&&n.push(this.makeCode(c)),n.push.apply(n,w.compileToFragments(e,L)),u&&n.push(this.makeCode(u));return s?n.push(this.makeCode(",\n"+a+k+"\n"+this.tab+")")):0!==T.length&&n.push(this.makeCode("\n"+this.tab+"}")),this.front&&!s?this.wrapInBraces(n):n},n.prototype.assigns=function(e){var t,n,i,r;for(r=this.properties,t=0,n=r.length;n>t;t++)if(i=r[t],i.assigns(e))return!0;return!1},n}(r),e.Arr=n=function(e){function t(e){this.objects=e||[]}return kt(t,e),t.prototype.children=["objects"],t.prototype.compileNode=function(e){var t,n,i,r,s,o,a;if(!this.objects.length)return[this.makeCode("[]")];if(e.indent+=q,t=G.compileSplattedArray(e,this.objects),t.length)return t;for(t=[],n=function(){var t,n,i,r;for(i=this.objects,r=[],t=0,n=i.length;n>t;t++)a=i[t],r.push(a.compileToFragments(e,F));return r}.call(this),r=s=0,o=n.length;o>s;r=++s)i=n[r],r&&t.push(this.makeCode(", ")),t.push.apply(t,i);return 
st(t).indexOf("\n")>=0?(t.unshift(this.makeCode("[\n"+e.indent)),t.push(this.makeCode("\n"+this.tab+"]"))):(t.unshift(this.makeCode("[")),t.push(this.makeCode("]"))),t},t.prototype.assigns=function(e){var t,n,i,r;for(r=this.objects,t=0,n=r.length;n>t;t++)if(i=r[t],i.assigns(e))return!0;return!1},t}(r),e.Class=a=function(e){function n(e,t,n){this.variable=e,this.parent=t,this.body=null!=n?n:new s,this.boundFuncs=[],this.body.classBody=!0}return kt(n,e),n.prototype.children=["variable","parent","body"],n.prototype.determineName=function(){var e,n,i;return this.variable?(n=this.variable.properties,i=n[n.length-1],e=i?i instanceof t&&i.name.value:this.variable.base.value,Tt.call(V,e)>=0&&this.variable.error("class variable name may not be "+e),e&&(e=g.test(e)&&e)):null},n.prototype.setContext=function(e){return this.body.traverseChildren(!1,function(t){return t.classBody?!1:t instanceof x&&"this"===t.value?t.value=e:t instanceof c&&t.bound?t.context=e:void 0})},n.prototype.addBoundFunctions=function(e){var n,i,r,s,o;for(o=this.boundFuncs,i=0,r=o.length;r>i;i++)n=o[i],s=new z(new x("this"),[new t(n)]).compile(e),this.ctor.body.unshift(new x(s+" = "+yt("bind",e)+"("+s+", this)"))},n.prototype.addProperties=function(e,n,r){var s,o,a,l,h,u;return u=e.base.properties.slice(0),l=function(){var e;for(e=[];o=u.shift();)o instanceof i&&(a=o.variable.base,delete o.context,h=o.value,"constructor"===a.value?(this.ctor&&o.error("cannot define more than one constructor in a class"),h.bound&&o.error("cannot define a constructor as a bound function"),h instanceof c?o=this.ctor=h:(this.externalCtor=r.classScope.freeVariable("class"),o=new i(new x(this.externalCtor),h))):o.variable["this"]?h["static"]=!0:(s=a.isComplex()?new w(a):new t(a),o.variable=new z(new x(n),[new t(new x("prototype")),s]),h instanceof c&&h.bound&&(this.boundFuncs.push(a),h.bound=!1))),e.push(o);return e}.call(this),et(l)},n.prototype.walkBody=function(e,t){return this.traverseChildren(!1,function(r){return 
function(o){var a,c,l,h,u,p,d;if(a=!0,o instanceof n)return!1;if(o instanceof s){for(d=c=o.expressions,l=h=0,u=d.length;u>h;l=++h)p=d[l],p instanceof i&&p.variable.looksStatic(e)?p.value["static"]=!0:p instanceof z&&p.isObject(!0)&&(a=!1,c[l]=r.addProperties(p,e,t));o.expressions=c=rt(c)}return a&&!(o instanceof n)}}(this))},n.prototype.hoistDirectivePrologue=function(){var e,t,n;for(t=0,e=this.body.expressions;(n=e[t])&&n instanceof h||n instanceof z&&n.isString();)++t;return this.directives=e.splice(0,t)},n.prototype.ensureConstructor=function(e){return this.ctor||(this.ctor=new c,this.externalCtor?this.ctor.body.push(new x(this.externalCtor+".apply(this, arguments)")):this.parent&&this.ctor.body.push(new x(e+".__super__.constructor.apply(this, arguments)")),this.ctor.body.makeReturn(),this.body.expressions.unshift(this.ctor)),this.ctor.ctor=this.ctor.name=e,this.ctor.klass=null,this.ctor.noReturn=!0},n.prototype.compileNode=function(e){var t,n,r,a,l,h,u,p,f;return(a=this.body.jumps())&&a.error("Class bodies cannot contain pure statements"),(n=this.body.contains(at))&&n.error("Class bodies shouldn't reference arguments"),u=this.determineName()||"_Class",u.reserved&&(u="_"+u),h=new x(u),r=new c([],s.wrap([this.body])),t=[],e.classScope=r.makeScope(e.scope),this.hoistDirectivePrologue(),this.setContext(u),this.walkBody(u,e),this.ensureConstructor(u),this.addBoundFunctions(e),this.body.spaced=!0,this.body.expressions.push(h),this.parent&&(f=new x(e.classScope.freeVariable("superClass",{reserve:!1})),this.body.expressions.unshift(new d(h,f)),r.params.push(new _(f)),t.push(this.parent)),(p=this.body.expressions).unshift.apply(p,this.directives),l=new O(new o(r,t)),this.variable&&(l=new i(this.variable,l)),l.compileToFragments(e)},n}(r),e.Assign=i=function(e){function n(e,t,n,i){var 
r,s,o;this.variable=e,this.value=t,this.context=n,null==i&&(i={}),this.param=i.param,this.subpattern=i.subpattern,this.operatorToken=i.operatorToken,o=s=this.variable.unwrapAll().value,r=Tt.call(V,o)>=0,r&&"object"!==this.context&&this.variable.error('variable name may not be "'+s+'"') -}return kt(n,e),n.prototype.children=["variable","value"],n.prototype.isStatement=function(e){return(null!=e?e.level:void 0)===L&&null!=this.context&&Tt.call(this.context,"?")>=0},n.prototype.assigns=function(e){return this["object"===this.context?"value":"variable"].assigns(e)},n.prototype.unfoldSoak=function(e){return bt(e,this,"variable")},n.prototype.compileNode=function(e){var t,n,i,r,s,o,a,l,h,u,p,d,f,m;if(i=this.variable instanceof z){if(this.variable.isArray()||this.variable.isObject())return this.compilePatternMatch(e);if(this.variable.isSplice())return this.compileSplice(e);if("||="===(l=this.context)||"&&="===l||"?="===l)return this.compileConditional(e);if("**="===(h=this.context)||"//="===h||"%%="===h)return this.compileSpecialMath(e)}return this.value instanceof c&&(this.value["static"]?(this.value.klass=this.variable.base,this.value.name=this.variable.properties[0],this.value.variable=this.variable):(null!=(u=this.variable.properties)?u.length:void 0)>=2&&(p=this.variable.properties,o=p.length>=3?Ct.call(p,0,r=p.length-2):(r=0,[]),a=p[r++],s=p[r++],"prototype"===(null!=(d=a.name)?d.value:void 0)&&(this.value.klass=new z(this.variable.base,o),this.value.name=s,this.value.variable=this.variable))),this.context||(m=this.variable.unwrapAll(),m.isAssignable()||this.variable.error('"'+this.variable.compile(e)+'" cannot be assigned'),("function"==typeof m.hasProperties?m.hasProperties():void 0)||(this.param?e.scope.add(m.value,"var"):e.scope.find(m.value))),f=this.value.compileToFragments(e,F),i&&this.variable.base instanceof A&&(this.variable.front=!0),n=this.variable.compileToFragments(e,F),"object"===this.context?n.concat(this.makeCode(": "),f):(t=n.concat(this.makeCode(" 
"+(this.context||"=")+" "),f),F>=e.level?t:this.wrapInBraces(t))},n.prototype.compilePatternMatch=function(e){var i,r,s,o,a,c,l,h,u,d,f,m,v,b,y,k,T,C,N,S,D,R,A,_,O,j,M,B;if(_=e.level===L,j=this.value,y=this.variable.base.objects,!(k=y.length))return s=j.compileToFragments(e),e.level>=E?this.wrapInBraces(s):s;if(b=y[0],1===k&&b instanceof p&&b.error("Destructuring assignment has no target"),u=this.variable.isObject(),_&&1===k&&!(b instanceof G))return o=null,b instanceof n&&"object"===b.context?(C=b,N=C.variable,h=N.base,b=C.value,b instanceof n&&(o=b.value,b=b.variable)):(b instanceof n&&(o=b.value,b=b.variable),h=u?b["this"]?b.properties[0].name:b:new x(0)),i=g.test(h.unwrap().value),j=new z(j),j.properties.push(new(i?t:w)(h)),S=b.unwrap().value,Tt.call($,S)>=0&&b.error("assignment to a reserved word: "+b.compile(e)),o&&(j=new I("?",j,o)),new n(b,j,null,{param:this.param}).compileToFragments(e,L);for(M=j.compileToFragments(e,F),B=st(M),r=[],a=!1,(!g.test(B)||this.variable.assigns(B))&&(r.push([this.makeCode((T=e.scope.freeVariable("ref"))+" = ")].concat(Ct.call(M))),M=[this.makeCode(T)],B=T),l=f=0,m=y.length;m>f;l=++f){if(b=y[l],h=l,!a&&b instanceof G)v=b.name.unwrap().value,b=b.unwrap(),O=k+" <= "+B+".length ? 
"+yt("slice",e)+".call("+B+", "+l,(A=k-l-1)?(d=e.scope.freeVariable("i",{single:!0}),O+=", "+d+" = "+B+".length - "+A+") : ("+d+" = "+l+", [])"):O+=") : []",O=new x(O),a=d+"++";else{if(!a&&b instanceof p){(A=k-l-1)&&(1===A?a=B+".length - 1":(d=e.scope.freeVariable("i",{single:!0}),O=new x(d+" = "+B+".length - "+A),a=d+"++",r.push(O.compileToFragments(e,F))));continue}(b instanceof G||b instanceof p)&&b.error("multiple splats/expansions are disallowed in an assignment"),o=null,b instanceof n&&"object"===b.context?(D=b,R=D.variable,h=R.base,b=D.value,b instanceof n&&(o=b.value,b=b.variable)):(b instanceof n&&(o=b.value,b=b.variable),h=u?b["this"]?b.properties[0].name:b:new x(a||h)),v=b.unwrap().value,i=g.test(h.unwrap().value),O=new z(new x(B),[new(i?t:w)(h)]),o&&(O=new I("?",O,o))}null!=v&&Tt.call($,v)>=0&&b.error("assignment to a reserved word: "+b.compile(e)),r.push(new n(b,O,null,{param:this.param,subpattern:!0}).compileToFragments(e,F))}return _||this.subpattern||r.push(M),c=this.joinFragmentArrays(r,", "),F>e.level?c:this.wrapInBraces(c)},n.prototype.compileConditional=function(e){var t,i,r,s;return r=this.variable.cacheReference(e),i=r[0],s=r[1],!i.properties.length&&i.base instanceof x&&"this"!==i.base.value&&!e.scope.check(i.base.value)&&this.variable.error('the variable "'+i.base.value+"\" can't be assigned with "+this.context+" because it has not been declared before"),Tt.call(this.context,"?")>=0?(e.isExistentialEquals=!0,new y(new u(i),s,{type:"if"}).addElse(new n(s,this.value,"=")).compileToFragments(e)):(t=new I(this.context.slice(0,-1),i,new n(s,this.value,"=")).compileToFragments(e),F>=e.level?t:this.wrapInBraces(t))},n.prototype.compileSpecialMath=function(e){var t,i,r;return i=this.variable.cacheReference(e),t=i[0],r=i[1],new n(t,new I(this.context.slice(0,-1),r,this.value)).compileToFragments(e)},n.prototype.compileSplice=function(e){var t,n,i,r,s,o,a,c,l,h,u,p;return 
a=this.variable.properties.pop().range,i=a.from,h=a.to,n=a.exclusive,o=this.variable.compile(e),i?(c=this.cacheToCodeFragments(i.cache(e,E)),r=c[0],s=c[1]):r=s="0",h?i instanceof z&&i.isSimpleNumber()&&h instanceof z&&h.isSimpleNumber()?(h=h.compile(e)-s,n||(h+=1)):(h=h.compile(e,T)+" - "+s,n||(h+=" + 1")):h="9e9",l=this.value.cache(e,F),u=l[0],p=l[1],t=[].concat(this.makeCode("[].splice.apply("+o+", ["+r+", "+h+"].concat("),u,this.makeCode(")), "),p),e.level>L?this.wrapInBraces(t):t},n}(r),e.Code=c=function(e){function t(e,t,n){this.params=e||[],this.body=t||new s,this.bound="boundfunc"===n,this.isGenerator=!!this.body.contains(function(e){var t;return e instanceof I&&("yield"===(t=e.operator)||"yield*"===t)})}return kt(t,e),t.prototype.children=["params","body"],t.prototype.isStatement=function(){return!!this.ctor},t.prototype.jumps=D,t.prototype.makeScope=function(e){return new P(e,this.body,this)},t.prototype.compileNode=function(e){var r,a,c,l,h,u,d,f,m,g,v,b,k,w,C,F,E,N,L,S,D,R,A,O,$,j,M,B,V,P,U,G,H;if(this.bound&&(null!=(A=e.scope.method)?A.bound:void 0)&&(this.context=e.scope.method.context),this.bound&&!this.context)return this.context="_this",H=new t([new _(new x(this.context))],new s([this])),a=new o(H,[new x("this")]),a.updateLocationDataIfMissing(this.locationData),a.compileNode(e);for(e.scope=tt(e,"classScope")||this.makeScope(e.scope),e.scope.shared=tt(e,"sharedScope"),e.indent+=q,delete e.bare,delete e.isExistentialEquals,L=[],l=[],O=this.params,u=0,m=O.length;m>u;u++)N=O[u],N instanceof p||e.scope.parameter(N.asReference(e));for($=this.params,d=0,g=$.length;g>d;d++)if(N=$[d],N.splat||N instanceof p){for(j=this.params,f=0,v=j.length;v>f;f++)E=j[f],E instanceof p||!E.name.value||e.scope.add(E.name.value,"var",!0);V=new i(new z(new n(function(){var t,n,i,r;for(i=this.params,r=[],n=0,t=i.length;t>n;n++)E=i[n],r.push(E.asReference(e));return r}.call(this))),new z(new 
x("arguments")));break}for(M=this.params,F=0,b=M.length;b>F;F++)N=M[F],N.isComplex()?(U=R=N.asReference(e),N.value&&(U=new I("?",R,N.value)),l.push(new i(new z(N.name),U,"=",{param:!0}))):(R=N,N.value&&(C=new x(R.name.value+" == null"),U=new i(new z(N.name),N.value,"="),l.push(new y(C,U)))),V||L.push(R);for(G=this.body.isEmpty(),V&&l.unshift(V),l.length&&(B=this.body.expressions).unshift.apply(B,l),h=S=0,k=L.length;k>S;h=++S)E=L[h],L[h]=E.compileToFragments(e),e.scope.parameter(st(L[h]));for(P=[],this.eachParamName(function(e,t){return Tt.call(P,e)>=0&&t.error("multiple parameters named "+e),P.push(e)}),G||this.noReturn||this.body.makeReturn(),c="function",this.isGenerator&&(c+="*"),this.ctor&&(c+=" "+this.name),c+="(",r=[this.makeCode(c)],h=D=0,w=L.length;w>D;h=++D)E=L[h],h&&r.push(this.makeCode(", ")),r.push.apply(r,E);return r.push(this.makeCode(") {")),this.body.isEmpty()||(r=r.concat(this.makeCode("\n"),this.body.compileWithDeclarations(e),this.makeCode("\n"+this.tab))),r.push(this.makeCode("}")),this.ctor?[this.makeCode(this.tab)].concat(Ct.call(r)):this.front||e.level>=T?this.wrapInBraces(r):r},t.prototype.eachParamName=function(e){var t,n,i,r,s;for(r=this.params,s=[],t=0,n=r.length;n>t;t++)i=r[t],s.push(i.eachName(e));return s},t.prototype.traverseChildren=function(e,n){return e?t.__super__.traverseChildren.call(this,e,n):void 0},t}(r),e.Param=_=function(e){function t(e,t,n){var i,r,s;this.name=e,this.value=t,this.splat=n,r=i=this.name.unwrapAll().value,Tt.call(V,r)>=0&&this.name.error('parameter name "'+i+'" is not allowed'),this.name instanceof A&&this.name.generated&&(s=this.name.objects[0].operatorToken,s.error("unexpected "+s.value))}return kt(t,e),t.prototype.children=["name","value"],t.prototype.compileToFragments=function(e){return this.name.compileToFragments(e,F)},t.prototype.asReference=function(e){var t,n;return this.reference?this.reference:(n=this.name,n["this"]?(t=n.properties[0].name.value,t.reserved&&(t="_"+t),n=new 
x(e.scope.freeVariable(t))):n.isComplex()&&(n=new x(e.scope.freeVariable("arg"))),n=new z(n),this.splat&&(n=new G(n)),n.updateLocationDataIfMissing(this.locationData),this.reference=n)},t.prototype.isComplex=function(){return this.name.isComplex()},t.prototype.eachName=function(e,t){var n,r,s,o,a,c;if(null==t&&(t=this.name),n=function(t){return e("@"+t.properties[0].name.value,t)},t instanceof x)return e(t.value,t);if(t instanceof z)return n(t);for(c=t.objects,r=0,s=c.length;s>r;r++)a=c[r],a instanceof i&&null==a.context&&(a=a.variable),a instanceof i?this.eachName(e,a.value.unwrap()):a instanceof G?(o=a.name.unwrap(),e(o.value,o)):a instanceof z?a.isArray()||a.isObject()?this.eachName(e,a.base):a["this"]?n(a):e(a.base.value,a.base):a instanceof p||a.error("illegal parameter "+a.compile())},t}(r),e.Splat=G=function(e){function t(e){this.name=e.compile?e:new x(e)}return kt(t,e),t.prototype.children=["name"],t.prototype.isAssignable=Q,t.prototype.assigns=function(e){return this.name.assigns(e)},t.prototype.compileToFragments=function(e){return this.name.compileToFragments(e)},t.prototype.unwrap=function(){return this.name},t.compileSplattedArray=function(e,n,i){var r,s,o,a,c,l,h,u,p,d,f;for(h=-1;(f=n[++h])&&!(f instanceof t););if(h>=n.length)return[];if(1===n.length)return f=n[0],c=f.compileToFragments(e,F),i?c:[].concat(f.makeCode(yt("slice",e)+".call("),c,f.makeCode(")"));for(r=n.slice(h),l=u=0,d=r.length;d>u;l=++u)f=r[l],o=f.compileToFragments(e,F),r[l]=f instanceof t?[].concat(f.makeCode(yt("slice",e)+".call("),o,f.makeCode(")")):[].concat(f.makeCode("["),o,f.makeCode("]"));return 0===h?(f=n[0],a=f.joinFragmentArrays(r.slice(1),", "),r[0].concat(f.makeCode(".concat("),a,f.makeCode(")"))):(s=function(){var t,i,r,s;for(r=n.slice(0,h),s=[],t=0,i=r.length;i>t;t++)f=r[t],s.push(f.compileToFragments(e,F));return s}(),s=n[0].joinFragmentArrays(s,", "),a=n[h].joinFragmentArrays(r,", 
"),p=n[n.length-1],[].concat(n[0].makeCode("["),s,n[h].makeCode("].concat("),a,p.makeCode(")")))},t}(r),e.Expansion=p=function(e){function t(){return t.__super__.constructor.apply(this,arguments)}return kt(t,e),t.prototype.isComplex=D,t.prototype.compileNode=function(){return this.error("Expansion must be used inside a destructuring assignment or parameter list")},t.prototype.asReference=function(){return this},t.prototype.eachName=function(){},t}(r),e.While=J=function(e){function t(e,t){this.condition=(null!=t?t.invert:void 0)?e.invert():e,this.guard=null!=t?t.guard:void 0}return kt(t,e),t.prototype.children=["condition","guard","body"],t.prototype.isStatement=Q,t.prototype.makeReturn=function(e){return e?t.__super__.makeReturn.apply(this,arguments):(this.returns=!this.jumps({loop:!0}),this)},t.prototype.addBody=function(e){return this.body=e,this},t.prototype.jumps=function(){var e,t,n,i,r;if(e=this.body.expressions,!e.length)return!1;for(t=0,i=e.length;i>t;t++)if(r=e[t],n=r.jumps({loop:!0}))return n;return!1},t.prototype.compileNode=function(e){var t,n,i,r;return e.indent+=q,r="",n=this.body,n.isEmpty()?n=this.makeCode(""):(this.returns&&(n.makeReturn(i=e.scope.freeVariable("results")),r=""+this.tab+i+" = [];\n"),this.guard&&(n.expressions.length>1?n.expressions.unshift(new y(new O(this.guard).invert(),new x("continue"))):this.guard&&(n=s.wrap([new y(this.guard,n)]))),n=[].concat(this.makeCode("\n"),n.compileToFragments(e,L),this.makeCode("\n"+this.tab))),t=[].concat(this.makeCode(r+this.tab+"while ("),this.condition.compileToFragments(e,N),this.makeCode(") {"),n,this.makeCode("}")),this.returns&&t.push(this.makeCode("\n"+this.tab+"return "+i+";")),t},t}(r),e.Op=I=function(e){function n(e,t,n,i){if("in"===e)return new k(t,n);if("do"===e)return this.generateDo(t);if("new"===e){if(t instanceof o&&!t["do"]&&!t.isNew)return t.newInstance();(t instanceof c&&t.bound||t["do"])&&(t=new O(t))}return this.operator=r[e]||e,this.first=t,this.second=n,this.flip=!!i,this}var 
r,s;return kt(n,e),r={"==":"===","!=":"!==",of:"in",yieldfrom:"yield*"},s={"!==":"===","===":"!=="},n.prototype.children=["first","second"],n.prototype.isSimpleNumber=D,n.prototype.isYield=function(){var e;return"yield"===(e=this.operator)||"yield*"===e},n.prototype.isYieldReturn=function(){return this.isYield()&&this.first instanceof M},n.prototype.isUnary=function(){return!this.second},n.prototype.isComplex=function(){var e;return!(this.isUnary()&&("+"===(e=this.operator)||"-"===e)&&this.first instanceof z&&this.first.isSimpleNumber())},n.prototype.isChainable=function(){var e;return"<"===(e=this.operator)||">"===e||">="===e||"<="===e||"==="===e||"!=="===e},n.prototype.invert=function(){var e,t,i,r,o;if(this.isChainable()&&this.first.isChainable()){for(e=!0,t=this;t&&t.operator;)e&&(e=t.operator in s),t=t.first;if(!e)return new O(this).invert();for(t=this;t&&t.operator;)t.invert=!t.invert,t.operator=s[t.operator],t=t.first;return this}return(r=s[this.operator])?(this.operator=r,this.first.unwrap()instanceof n&&this.first.invert(),this):this.second?new O(this).invert():"!"===this.operator&&(i=this.first.unwrap())instanceof n&&("!"===(o=i.operator)||"in"===o||"instanceof"===o)?i:new n("!",this)},n.prototype.unfoldSoak=function(e){var t;return("++"===(t=this.operator)||"--"===t||"delete"===t)&&bt(e,this,"first")},n.prototype.generateDo=function(e){var t,n,r,s,a,l,h,u;for(l=[],n=e instanceof i&&(h=e.value.unwrap())instanceof c?h:e,u=n.params||[],r=0,s=u.length;s>r;r++)a=u[r],a.value?(l.push(a.value),delete a.value):l.push(a);return t=new o(e,l),t["do"]=!0,t},n.prototype.compileNode=function(e){var t,n,i,r,s,o;if(n=this.isChainable()&&this.first.isChainable(),n||(this.first.front=this.front),"delete"===this.operator&&e.scope.check(this.first.unwrapAll().value)&&this.error("delete operand may not be argument or var"),("--"===(r=this.operator)||"++"===r)&&(s=this.first.unwrapAll().value,Tt.call(V,s)>=0)&&this.error('cannot increment/decrement 
"'+this.first.unwrapAll().value+'"'),this.isYield())return this.compileYield(e);if(this.isUnary())return this.compileUnary(e);if(n)return this.compileChain(e);switch(this.operator){case"?":return this.compileExistence(e);case"**":return this.compilePower(e);case"//":return this.compileFloorDivision(e);case"%%":return this.compileModulo(e);default:return i=this.first.compileToFragments(e,E),o=this.second.compileToFragments(e,E),t=[].concat(i,this.makeCode(" "+this.operator+" "),o),E>=e.level?t:this.wrapInBraces(t)}},n.prototype.compileChain=function(e){var t,n,i,r;return i=this.first.second.cache(e),this.first.second=i[0],r=i[1],n=this.first.compileToFragments(e,E),t=n.concat(this.makeCode(" "+(this.invert?"&&":"||")+" "),r.compileToFragments(e),this.makeCode(" "+this.operator+" "),this.second.compileToFragments(e,E)),this.wrapInBraces(t)},n.prototype.compileExistence=function(e){var t,n;return this.first.isComplex()?(n=new x(e.scope.freeVariable("ref")),t=new O(new i(n,this.first))):(t=this.first,n=t),new y(new u(t),n,{type:"if"}).addElse(this.second).compileToFragments(e)},n.prototype.compileUnary=function(e){var t,i,r;return i=[],t=this.operator,i.push([this.makeCode(t)]),"!"===t&&this.first instanceof u?(this.first.negated=!this.first.negated,this.first.compileToFragments(e)):e.level>=T?new O(this).compileToFragments(e):(r="+"===t||"-"===t,("new"===t||"typeof"===t||"delete"===t||r&&this.first instanceof n&&this.first.operator===t)&&i.push([this.makeCode(" ")]),(r&&this.first instanceof n||"new"===t&&this.first.isStatement(e))&&(this.first=new O(this.first)),i.push(this.first.compileToFragments(e,E)),this.flip&&i.reverse(),this.joinFragmentArrays(i,""))},n.prototype.compileYield=function(e){var t,n;return n=[],t=this.operator,null==e.scope.parent&&this.error("yield statements must occur within a function generator."),Tt.call(Object.keys(this.first),"expression")>=0&&!(this.first instanceof 
W)?this.isYieldReturn()?n.push(this.first.compileToFragments(e,L)):null!=this.first.expression&&n.push(this.first.expression.compileToFragments(e,E)):(n.push([this.makeCode("("+t+" ")]),n.push(this.first.compileToFragments(e,E)),n.push([this.makeCode(")")])),this.joinFragmentArrays(n,"")},n.prototype.compilePower=function(e){var n;return n=new z(new x("Math"),[new t(new x("pow"))]),new o(n,[this.first,this.second]).compileToFragments(e)},n.prototype.compileFloorDivision=function(e){var i,r;return r=new z(new x("Math"),[new t(new x("floor"))]),i=new n("/",this.first,this.second),new o(r,[i]).compileToFragments(e)},n.prototype.compileModulo=function(e){var t;return t=new z(new x(yt("modulo",e))),new o(t,[this.first,this.second]).compileToFragments(e)},n.prototype.toString=function(e){return n.__super__.toString.call(this,e,this.constructor.name+" "+this.operator)},n}(r),e.In=k=function(e){function t(e,t){this.object=e,this.array=t}return kt(t,e),t.prototype.children=["object","array"],t.prototype.invert=S,t.prototype.compileNode=function(e){var t,n,i,r,s;if(this.array instanceof z&&this.array.isArray()&&this.array.base.objects.length){for(s=this.array.base.objects,n=0,i=s.length;i>n;n++)if(r=s[n],r instanceof G){t=!0;break}if(!t)return this.compileOrTest(e)}return this.compileLoopTest(e)},t.prototype.compileOrTest=function(e){var t,n,i,r,s,o,a,c,l,h,u,p;for(c=this.object.cache(e,E),u=c[0],a=c[1],l=this.negated?[" !== "," && "]:[" === "," || "],t=l[0],n=l[1],p=[],h=this.array.base.objects,i=s=0,o=h.length;o>s;i=++s)r=h[i],i&&p.push(this.makeCode(n)),p=p.concat(i?a:u,this.makeCode(t),r.compileToFragments(e,T));return E>e.level?p:this.wrapInBraces(p)},t.prototype.compileLoopTest=function(e){var t,n,i,r;return i=this.object.cache(e,F),r=i[0],n=i[1],t=[].concat(this.makeCode(yt("indexOf",e)+".call("),this.array.compileToFragments(e,F),this.makeCode(", "),n,this.makeCode(") "+(this.negated?"< 0":">= 0"))),st(r)===st(n)?t:(t=r.concat(this.makeCode(", 
"),t),F>e.level?t:this.wrapInBraces(t))},t.prototype.toString=function(e){return t.__super__.toString.call(this,e,this.constructor.name+(this.negated?"!":""))},t}(r),e.Try=Y=function(e){function t(e,t,n,i){this.attempt=e,this.errorVariable=t,this.recovery=n,this.ensure=i}return kt(t,e),t.prototype.children=["attempt","recovery","ensure"],t.prototype.isStatement=Q,t.prototype.jumps=function(e){var t;return this.attempt.jumps(e)||(null!=(t=this.recovery)?t.jumps(e):void 0)},t.prototype.makeReturn=function(e){return this.attempt&&(this.attempt=this.attempt.makeReturn(e)),this.recovery&&(this.recovery=this.recovery.makeReturn(e)),this},t.prototype.compileNode=function(e){var t,n,r,s,o;return e.indent+=q,o=this.attempt.compileToFragments(e,L),t=this.recovery?(r=e.scope.freeVariable("error"),s=new x(r),this.errorVariable?this.recovery.unshift(new i(this.errorVariable,s)):void 0,[].concat(this.makeCode(" catch ("),s.compileToFragments(e),this.makeCode(") {\n"),this.recovery.compileToFragments(e,L),this.makeCode("\n"+this.tab+"}"))):this.ensure||this.recovery?[]:[this.makeCode(" catch ("+r+") {}")],n=this.ensure?[].concat(this.makeCode(" finally {\n"),this.ensure.compileToFragments(e,L),this.makeCode("\n"+this.tab+"}")):[],[].concat(this.makeCode(this.tab+"try {\n"),o,this.makeCode("\n"+this.tab+"}"),t,n)},t}(r),e.Throw=W=function(e){function t(e){this.expression=e}return kt(t,e),t.prototype.children=["expression"],t.prototype.isStatement=Q,t.prototype.jumps=D,t.prototype.makeReturn=X,t.prototype.compileNode=function(e){return[].concat(this.makeCode(this.tab+"throw "),this.expression.compileToFragments(e),this.makeCode(";"))},t}(r),e.Existence=u=function(e){function t(e){this.expression=e}return kt(t,e),t.prototype.children=["expression"],t.prototype.invert=S,t.prototype.compileNode=function(e){var t,n,i,r;return this.expression.front=this.front,i=this.expression.compile(e,E),g.test(i)&&!e.scope.check(i)?(r=this.negated?["===","||"]:["!==","&&"],t=r[0],n=r[1],i="typeof 
"+i+" "+t+' "undefined" '+n+" "+i+" "+t+" null"):i=i+" "+(this.negated?"==":"!=")+" null",[this.makeCode(C>=e.level?i:"("+i+")")]},t}(r),e.Parens=O=function(e){function t(e){this.body=e}return kt(t,e),t.prototype.children=["body"],t.prototype.unwrap=function(){return this.body},t.prototype.isComplex=function(){return this.body.isComplex()},t.prototype.compileNode=function(e){var t,n,i;return n=this.body.unwrap(),n instanceof z&&n.isAtomic()?(n.front=this.front,n.compileToFragments(e)):(i=n.compileToFragments(e,N),t=E>e.level&&(n instanceof I||n instanceof o||n instanceof f&&n.returns),t?i:this.wrapInBraces(i))},t}(r),e.For=f=function(e){function t(e,t){var n;this.source=t.source,this.guard=t.guard,this.step=t.step,this.name=t.name,this.index=t.index,this.body=s.wrap([e]),this.own=!!t.own,this.object=!!t.object,this.object&&(n=[this.index,this.name],this.name=n[0],this.index=n[1]),this.index instanceof z&&this.index.error("index cannot be a pattern matching expression"),this.range=this.source instanceof z&&this.source.base instanceof j&&!this.source.properties.length,this.pattern=this.name instanceof z,this.range&&this.index&&this.index.error("indexes do not apply to range loops"),this.range&&this.pattern&&this.name.error("cannot pattern match over range loops"),this.own&&!this.object&&this.name.error("cannot use own with for-in"),this.returns=!1}return kt(t,e),t.prototype.children=["body","source","guard","step"],t.prototype.compileNode=function(e){var t,n,r,o,a,c,l,h,u,p,d,f,m,v,b,k,w,T,C,E,N,S,D,A,I,_,$,j,B,V,P,U,G,H;return t=s.wrap([this.body]),D=t.expressions,T=D[D.length-1],(null!=T?T.jumps():void 0)instanceof M&&(this.returns=!1),B=this.range?this.source.base:this.source,j=e.scope,this.pattern||(E=this.name&&this.name.compile(e,F)),v=this.index&&this.index.compile(e,F),E&&!this.pattern&&j.find(E),v&&j.find(v),this.returns&&($=j.freeVariable("results")),b=this.object&&v||j.freeVariable("i",{single:!0}),k=this.range&&E||v||b,w=k!==b?k+" = 
":"",this.step&&!this.range&&(A=this.cacheToCodeFragments(this.step.cache(e,F,ot)),V=A[0],U=A[1],P=U.match(R)),this.pattern&&(E=b),H="",d="",l="",f=this.tab+q,this.range?p=B.compileToFragments(ht(e,{index:b,name:E,step:this.step,isComplex:ot})):(G=this.source.compile(e,F),!E&&!this.own||g.test(G)||(l+=""+this.tab+(S=j.freeVariable("ref"))+" = "+G+";\n",G=S),E&&!this.pattern&&(N=E+" = "+G+"["+k+"]"),this.object||(V!==U&&(l+=""+this.tab+V+";\n"),this.step&&P&&(u=0>pt(P[0]))||(C=j.freeVariable("len")),a=""+w+b+" = 0, "+C+" = "+G+".length",c=""+w+b+" = "+G+".length - 1",r=b+" < "+C,o=b+" >= 0",this.step?(P?u&&(r=o,a=c):(r=U+" > 0 ? "+r+" : "+o,a="("+U+" > 0 ? ("+a+") : "+c+")"),m=b+" += "+U):m=""+(k!==b?"++"+b:b+"++"),p=[this.makeCode(a+"; "+r+"; "+w+m)])),this.returns&&(I=""+this.tab+$+" = [];\n",_="\n"+this.tab+"return "+$+";",t.makeReturn($)),this.guard&&(t.expressions.length>1?t.expressions.unshift(new y(new O(this.guard).invert(),new x("continue"))):this.guard&&(t=s.wrap([new y(this.guard,t)]))),this.pattern&&t.expressions.unshift(new i(this.name,new x(G+"["+k+"]"))),h=[].concat(this.makeCode(l),this.pluckDirectCall(e,t)),N&&(H="\n"+f+N+";"),this.object&&(p=[this.makeCode(k+" in "+G)],this.own&&(d="\n"+f+"if (!"+yt("hasProp",e)+".call("+G+", "+k+")) continue;")),n=t.compileToFragments(ht(e,{indent:f}),L),n&&n.length>0&&(n=[].concat(this.makeCode("\n"),n,this.makeCode("\n"))),[].concat(h,this.makeCode(""+(I||"")+this.tab+"for ("),p,this.makeCode(") {"+d+H),n,this.makeCode(this.tab+"}"+(_||"")))},t.prototype.pluckDirectCall=function(e,t){var n,r,s,a,l,h,u,p,d,f,m,g,v,b,y,k;for(r=[],d=t.expressions,l=h=0,u=d.length;u>h;l=++h)s=d[l],s=s.unwrapAll(),s instanceof o&&(k=null!=(f=s.variable)?f.unwrapAll():void 0,(k instanceof c||k instanceof z&&(null!=(m=k.base)?m.unwrapAll():void 0)instanceof c&&1===k.properties.length&&("call"===(g=null!=(v=k.properties[0].name)?v.value:void 0)||"apply"===g))&&(a=(null!=(b=k.base)?b.unwrapAll():void 0)||k,p=new 
x(e.scope.freeVariable("fn")),n=new z(p),k.base&&(y=[n,k],k.base=y[0],n=y[1]),t.expressions[l]=new o(n,s.args),r=r.concat(this.makeCode(this.tab),new i(p,a).compileToFragments(e,L),this.makeCode(";\n"))));return r},t}(J),e.Switch=H=function(e){function t(e,t,n){this.subject=e,this.cases=t,this.otherwise=n}return kt(t,e),t.prototype.children=["subject","cases","otherwise"],t.prototype.isStatement=Q,t.prototype.jumps=function(e){var t,n,i,r,s,o,a,c;for(null==e&&(e={block:!0}),o=this.cases,i=0,s=o.length;s>i;i++)if(a=o[i],n=a[0],t=a[1],r=t.jumps(e))return r;return null!=(c=this.otherwise)?c.jumps(e):void 0},t.prototype.makeReturn=function(e){var t,n,i,r,o;for(r=this.cases,t=0,n=r.length;n>t;t++)i=r[t],i[1].makeReturn(e);return e&&(this.otherwise||(this.otherwise=new s([new x("void 0")]))),null!=(o=this.otherwise)&&o.makeReturn(e),this},t.prototype.compileNode=function(e){var t,n,i,r,s,o,a,c,l,h,u,p,d,f,m,g;for(c=e.indent+q,l=e.indent=c+q,o=[].concat(this.makeCode(this.tab+"switch ("),this.subject?this.subject.compileToFragments(e,N):this.makeCode("false"),this.makeCode(") {\n")),f=this.cases,a=h=0,p=f.length;p>h;a=++h){for(m=f[a],r=m[0],t=m[1],g=rt([r]),u=0,d=g.length;d>u;u++)i=g[u],this.subject||(i=i.invert()),o=o.concat(this.makeCode(c+"case "),i.compileToFragments(e,N),this.makeCode(":\n"));if((n=t.compileToFragments(e,L)).length>0&&(o=o.concat(n,this.makeCode("\n"))),a===this.cases.length-1&&!this.otherwise)break;s=this.lastNonComment(t.expressions),s instanceof M||s instanceof x&&s.jumps()&&"debugger"!==s.value||o.push(i.makeCode(l+"break;\n"))}return this.otherwise&&this.otherwise.expressions.length&&o.push.apply(o,[this.makeCode(c+"default:\n")].concat(Ct.call(this.otherwise.compileToFragments(e,L)),[this.makeCode("\n")])),o.push(this.makeCode(this.tab+"}")),o},t}(r),e.If=y=function(e){function t(e,t,n){this.body=t,null==n&&(n={}),this.condition="unless"===n.type?e.invert():e,this.elseBody=null,this.isChain=!1,this.soak=n.soak}return 
kt(t,e),t.prototype.children=["condition","body","elseBody"],t.prototype.bodyNode=function(){var e;return null!=(e=this.body)?e.unwrap():void 0},t.prototype.elseBodyNode=function(){var e;return null!=(e=this.elseBody)?e.unwrap():void 0},t.prototype.addElse=function(e){return this.isChain?this.elseBodyNode().addElse(e):(this.isChain=e instanceof t,this.elseBody=this.ensureBlock(e),this.elseBody.updateLocationDataIfMissing(e.locationData)),this},t.prototype.isStatement=function(e){var t;return(null!=e?e.level:void 0)===L||this.bodyNode().isStatement(e)||(null!=(t=this.elseBodyNode())?t.isStatement(e):void 0)},t.prototype.jumps=function(e){var t;return this.body.jumps(e)||(null!=(t=this.elseBody)?t.jumps(e):void 0)},t.prototype.compileNode=function(e){return this.isStatement(e)?this.compileStatement(e):this.compileExpression(e)},t.prototype.makeReturn=function(e){return e&&(this.elseBody||(this.elseBody=new s([new x("void 0")]))),this.body&&(this.body=new s([this.body.makeReturn(e)])),this.elseBody&&(this.elseBody=new s([this.elseBody.makeReturn(e)])),this},t.prototype.ensureBlock=function(e){return e instanceof s?e:new s([e])},t.prototype.compileStatement=function(e){var n,i,r,s,o,a,c;return r=tt(e,"chainChild"),(o=tt(e,"isExistentialEquals"))?new t(this.condition.invert(),this.elseBodyNode(),{type:"if"}).compileToFragments(e):(c=e.indent+q,s=this.condition.compileToFragments(e,N),i=this.ensureBlock(this.body).compileToFragments(ht(e,{indent:c})),a=[].concat(this.makeCode("if ("),s,this.makeCode(") {\n"),i,this.makeCode("\n"+this.tab+"}")),r||a.unshift(this.makeCode(this.tab)),this.elseBody?(n=a.concat(this.makeCode(" else ")),this.isChain?(e.chainChild=!0,n=n.concat(this.elseBody.unwrap().compileToFragments(e,L))):n=n.concat(this.makeCode("{\n"),this.elseBody.compileToFragments(ht(e,{indent:c}),L),this.makeCode("\n"+this.tab+"}")),n):a)},t.prototype.compileExpression=function(e){var t,n,i,r;return 
i=this.condition.compileToFragments(e,C),n=this.bodyNode().compileToFragments(e,F),t=this.elseBodyNode()?this.elseBodyNode().compileToFragments(e,F):[this.makeCode("void 0")],r=i.concat(this.makeCode(" ? "),n,this.makeCode(" : "),t),e.level>=C?this.wrapInBraces(r):r},t.prototype.unfoldSoak=function(){return this.soak&&this},t}(r),K={extend:function(e){return"function(child, parent) { for (var key in parent) { if ("+yt("hasProp",e)+".call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }"},bind:function(){return"function(fn, me){ return function(){ return fn.apply(me, arguments); }; }"},indexOf:function(){return"[].indexOf || function(item) { for (var i = 0, l = this.length; i < l; i++) { if (i in this && this[i] === item) return i; } return -1; }"},modulo:function(){return"function(a, b) { return (+a % (b = +b) + b) % b; }"},hasProp:function(){return"{}.hasOwnProperty"},slice:function(){return"[].slice"}},L=1,N=2,F=3,C=4,E=5,T=6,q=" ",g=/^(?!\d)[$\w\x7f-\uffff]+$/,B=/^[+-]?\d+$/,m=/^[+-]?0x[\da-f]+/i,R=/^[+-]?(?:0x[\da-f]+|\d*\.?\d+(?:e[+-]?\d+)?)$/i,b=/^['"]/,v=/^\//,yt=function(e,t){var n,i;return i=t.scope.root,e in i.utilities?i.utilities[e]:(n=i.freeVariable(e),i.assign(n,K[e](t)),i.utilities[e]=n)},ut=function(e,t){return e=e.replace(/\n/g,"$&"+t),e.replace(/\s+$/,"")},pt=function(e){return null==e?0:e.match(m)?parseInt(e,16):parseFloat(e)},at=function(e){return e instanceof x&&"arguments"===e.value&&!e.asKey},ct=function(e){return e instanceof x&&"this"===e.value&&!e.asKey||e instanceof c&&e.bound||e instanceof o&&e.isSuper},ot=function(e){return e.isComplex()||("function"==typeof e.isAssignable?e.isAssignable():void 0)},bt=function(e,t,n){var i;if(i=t[n].unfoldSoak(e))return t[n]=i.body,i.body=new z(t),i}}.call(this),t.exports}(),require["./sourcemap"]=function(){var e={},t={exports:e};return 
function(){var e,n;e=function(){function e(e){this.line=e,this.columns=[]}return e.prototype.add=function(e,t,n){var i,r;return r=t[0],i=t[1],null==n&&(n={}),this.columns[e]&&n.noReplace?void 0:this.columns[e]={line:this.line,column:e,sourceLine:r,sourceColumn:i}},e.prototype.sourceLocation=function(e){for(var t;!((t=this.columns[e])||0>=e);)e--;return t&&[t.sourceLine,t.sourceColumn]},e}(),n=function(){function t(){this.lines=[]}var n,i,r,s;return t.prototype.add=function(t,n,i){var r,s,o,a;return null==i&&(i={}),o=n[0],s=n[1],a=(r=this.lines)[o]||(r[o]=new e(o)),a.add(s,t,i)},t.prototype.sourceLocation=function(e){var t,n,i;for(n=e[0],t=e[1];!((i=this.lines[n])||0>=n);)n--;return i&&i.sourceLocation(t)},t.prototype.generate=function(e,t){var n,i,r,s,o,a,c,l,h,u,p,d,f,m,g,v;for(null==e&&(e={}),null==t&&(t=null),v=0,s=0,a=0,o=0,d=!1,n="",f=this.lines,u=i=0,c=f.length;c>i;u=++i)if(h=f[u])for(m=h.columns,r=0,l=m.length;l>r;r++)if(p=m[r]){for(;p.line>v;)s=0,d=!1,n+=";",v++;d&&(n+=",",d=!1),n+=this.encodeVlq(p.column-s),s=p.column,n+=this.encodeVlq(0),n+=this.encodeVlq(p.sourceLine-a),a=p.sourceLine,n+=this.encodeVlq(p.sourceColumn-o),o=p.sourceColumn,d=!0}return g={version:3,file:e.generatedFile||"",sourceRoot:e.sourceRoot||"",sources:e.sourceFiles||[""],names:[],mappings:n},e.inline&&(g.sourcesContent=[t]),JSON.stringify(g,null,2)},r=5,i=1<e?1:0,a=(Math.abs(e)<<1)+o;a||!t;)n=a&s,a>>=r,a&&(n|=i),t+=this.encodeBase64(n);return t},n="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",t.prototype.encodeBase64=function(e){return n[e]||function(){throw Error("Cannot Base64 encode value: "+e) -}()},t}(),t.exports=n}.call(this),t.exports}(),require["./coffee-script"]=function(){var e={},t={exports:e};return function(){var t,n,i,r,s,o,a,c,l,h,u,p,d,f,m,g,v,b,y={}.hasOwnProperty,k=[].indexOf||function(e){for(var t=0,n=this.length;n>t;t++)if(t in this&&this[t]===e)return 
t;return-1};if(a=require("fs"),v=require("vm"),f=require("path"),t=require("./lexer").Lexer,d=require("./parser").parser,l=require("./helpers"),n=require("./sourcemap"),e.VERSION="1.10.0",e.FILE_EXTENSIONS=[".coffee",".litcoffee",".coffee.md"],e.helpers=l,b=function(e){return function(t,n){var i,r;null==n&&(n={});try{return e.call(this,t,n)}catch(r){if(i=r,"string"!=typeof t)throw i;throw l.updateSyntaxError(i,t,n.filename)}}},e.compile=r=b(function(e,t){var i,r,s,o,a,c,h,u,f,m,g,v,b,y,k;for(v=l.merge,o=l.extend,t=o({},t),t.sourceMap&&(g=new n),k=p.tokenize(e,t),t.referencedVars=function(){var e,t,n;for(n=[],e=0,t=k.length;t>e;e++)y=k[e],y.variable&&n.push(y[1]);return n}(),c=d.parse(k).compileToFragments(t),s=0,t.header&&(s+=1),t.shiftLine&&(s+=1),r=0,f="",u=0,m=c.length;m>u;u++)a=c[u],t.sourceMap&&(a.locationData&&!/^[;\s]*$/.test(a.code)&&g.add([a.locationData.first_line,a.locationData.first_column],[s,r],{noReplace:!0}),b=l.count(a.code,"\n"),s+=b,b?r=a.code.length-(a.code.lastIndexOf("\n")+1):r+=a.code.length),f+=a.code;return t.header&&(h="Generated by CoffeeScript "+this.VERSION,f="// "+h+"\n"+f),t.sourceMap?(i={js:f},i.sourceMap=g,i.v3SourceMap=g.generate(t,e),i):f}),e.tokens=b(function(e,t){return p.tokenize(e,t)}),e.nodes=b(function(e,t){return"string"==typeof e?d.parse(p.tokenize(e,t)):d.parse(e)}),e.run=function(e,t){var n,i,s,o;return null==t&&(t={}),s=require.main,s.filename=process.argv[1]=t.filename?a.realpathSync(t.filename):".",s.moduleCache&&(s.moduleCache={}),i=t.filename?f.dirname(a.realpathSync(t.filename)):a.realpathSync("."),s.paths=require("module")._nodeModulePaths(i),(!l.isCoffee(s.filename)||require.extensions)&&(n=r(e,t),e=null!=(o=n.js)?o:n),s._compile(e,s.filename)},e.eval=function(e,t){var n,i,s,o,a,c,l,h,u,p,d,m,g,b,k,w,T;if(null==t&&(t={}),e=e.trim()){if(o=null!=(m=v.Script.createContext)?m:v.createContext,c=null!=(g=v.isContext)?g:function(){return t.sandbox instanceof 
o().constructor},o){if(null!=t.sandbox){if(c(t.sandbox))w=t.sandbox;else{w=o(),b=t.sandbox;for(h in b)y.call(b,h)&&(T=b[h],w[h]=T)}w.global=w.root=w.GLOBAL=w}else w=global;if(w.__filename=t.filename||"eval",w.__dirname=f.dirname(w.__filename),w===global&&!w.module&&!w.require){for(n=require("module"),w.module=i=new n(t.modulename||"eval"),w.require=s=function(e){return n._load(e,i,!0)},i.filename=w.__filename,k=Object.getOwnPropertyNames(require),a=0,u=k.length;u>a;a++)d=k[a],"paths"!==d&&"arguments"!==d&&"caller"!==d&&(s[d]=require[d]);s.paths=i.paths=n._nodeModulePaths(process.cwd()),s.resolve=function(e){return n._resolveFilename(e,i)}}}p={};for(h in t)y.call(t,h)&&(T=t[h],p[h]=T);return p.bare=!0,l=r(e,p),w===global?v.runInThisContext(l):v.runInContext(l,w)}},e.register=function(){return require("./register")},require.extensions)for(m=this.FILE_EXTENSIONS,h=0,u=m.length;u>h;h++)s=m[h],null==(i=require.extensions)[s]&&(i[s]=function(){throw Error("Use CoffeeScript.register() or require the coffee-script/register module to require "+s+" files.")});e._compileFile=function(e,t){var n,i,s,o,c;null==t&&(t=!1),o=a.readFileSync(e,"utf8"),c=65279===o.charCodeAt(0)?o.substring(1):o;try{n=r(c,{filename:e,sourceMap:t,literate:l.isLiterate(e)})}catch(s){throw i=s,l.updateSyntaxError(i,c,e)}return n},p=new t,d.lexer={lex:function(){var e,t;return t=d.tokens[this.pos++],t?(e=t[0],this.yytext=t[1],this.yylloc=t[2],d.errorToken=t.origin||t,this.yylineno=this.yylloc.first_line):e="",e},setInput:function(e){return d.tokens=e,this.pos=0},upcomingInput:function(){return""}},d.yy=require("./nodes"),d.yy.parseError=function(e,t){var n,i,r,s,o,a;return o=t.token,s=d.errorToken,a=d.tokens,i=s[0],r=s[1],n=s[2],r=function(){switch(!1){case s!==a[a.length-1]:return"end of input";case"INDENT"!==i&&"OUTDENT"!==i:return"indentation";case"IDENTIFIER"!==i&&"NUMBER"!==i&&"STRING"!==i&&"STRING_START"!==i&&"REGEX"!==i&&"REGEX_START"!==i:return i.replace(/_START$/,"").toLowerCase();default:return 
l.nameWhitespaceCharacter(r)}}(),l.throwSyntaxError("unexpected "+r,n)},o=function(e,t){var n,i,r,s,o,a,c,l,h,u,p,d;return s=void 0,r="",e.isNative()?r="native":(e.isEval()?(s=e.getScriptNameOrSourceURL(),s||(r=e.getEvalOrigin()+", ")):s=e.getFileName(),s||(s=""),l=e.getLineNumber(),i=e.getColumnNumber(),u=t(s,l,i),r=u?s+":"+u[0]+":"+u[1]:s+":"+l+":"+i),o=e.getFunctionName(),a=e.isConstructor(),c=!(e.isToplevel()||a),c?(h=e.getMethodName(),d=e.getTypeName(),o?(p=n="",d&&o.indexOf(d)&&(p=d+"."),h&&o.indexOf("."+h)!==o.length-h.length-1&&(n=" [as "+h+"]"),""+p+o+n+" ("+r+")"):d+"."+(h||"")+" ("+r+")"):a?"new "+(o||"")+" ("+r+")":o?o+" ("+r+")":r},g={},c=function(t){var n,i;if(g[t])return g[t];if(i=null!=f?f.extname(t):void 0,!(0>k.call(e.FILE_EXTENSIONS,i)))return n=e._compileFile(t,!0),g[t]=n.sourceMap},Error.prepareStackTrace=function(t,n){var i,r,s;return s=function(e,t,n){var i,r;return r=c(e),r&&(i=r.sourceLocation([t-1,n-1])),i?[i[0]+1,i[1]+1]:null},r=function(){var t,r,a;for(a=[],t=0,r=n.length;r>t&&(i=n[t],i.getFunction()!==e.run);t++)a.push(" at "+o(i,s));return a}(),""+t+"\n"+r.join("\n")+"\n"}}.call(this),t.exports}(),require["./browser"]=function(){var exports={},module={exports:exports};return function(){var CoffeeScript,compile,runScripts,indexOf=[].indexOf||function(e){for(var t=0,n=this.length;n>t;t++)if(t in this&&this[t]===e)return t;return-1};CoffeeScript=require("./coffee-script"),CoffeeScript.require=require,compile=CoffeeScript.compile,CoffeeScript.eval=function(code,options){return null==options&&(options={}),null==options.bare&&(options.bare=!0),eval(compile(code,options))},CoffeeScript.run=function(e,t){return null==t&&(t={}),t.bare=!0,t.shiftLine=!0,Function(compile(e,t))()},"undefined"!=typeof window&&null!==window&&("undefined"!=typeof btoa&&null!==btoa&&"undefined"!=typeof JSON&&null!==JSON&&"undefined"!=typeof unescape&&null!==unescape&&"undefined"!=typeof encodeURIComponent&&null!==encodeURIComponent&&(compile=function(e,t){var 
n,i,r;return null==t&&(t={}),t.sourceMap=!0,t.inline=!0,i=CoffeeScript.compile(e,t),n=i.js,r=i.v3SourceMap,n+"\n//# sourceMappingURL=data:application/json;base64,"+btoa(unescape(encodeURIComponent(r)))+"\n//# sourceURL=coffeescript"}),CoffeeScript.load=function(e,t,n,i){var r;return null==n&&(n={}),null==i&&(i=!1),n.sourceFiles=[e],r=window.ActiveXObject?new window.ActiveXObject("Microsoft.XMLHTTP"):new window.XMLHttpRequest,r.open("GET",e,!0),"overrideMimeType"in r&&r.overrideMimeType("text/plain"),r.onreadystatechange=function(){var s,o;if(4===r.readyState){if(0!==(o=r.status)&&200!==o)throw Error("Could not load "+e);if(s=[r.responseText,n],i||CoffeeScript.run.apply(CoffeeScript,s),t)return t(s)}},r.send(null)},runScripts=function(){var e,t,n,i,r,s,o,a,c,l,h;for(h=window.document.getElementsByTagName("script"),t=["text/coffeescript","text/literate-coffeescript"],e=function(){var e,n,i,r;for(r=[],e=0,n=h.length;n>e;e++)c=h[e],i=c.type,indexOf.call(t,i)>=0&&r.push(c);return r}(),s=0,n=function(){var t;return t=e[s],t instanceof Array?(CoffeeScript.run.apply(CoffeeScript,t),s++,n()):void 0},i=function(i,r){var s,o;return s={literate:i.type===t[1]},o=i.src||i.getAttribute("data-src"),o?CoffeeScript.load(o,function(t){return e[r]=t,n()},s,!0):(s.sourceFiles=["embedded"],e[r]=[i.innerHTML,s])},r=o=0,a=e.length;a>o;r=++o)l=e[r],i(l,r);return n()},window.addEventListener?window.addEventListener("DOMContentLoaded",runScripts,!1):window.attachEvent("onload",runScripts))}.call(this),module.exports}(),require["./coffee-script"]}();"function"==typeof define&&define.amd?define(function(){return CoffeeScript}):root.CoffeeScript=CoffeeScript})(this); \ No newline at end of file +var $jscomp=$jscomp||{};$jscomp.scope={};$jscomp.checkStringArgs=function(u,xa,va){if(null==u)throw new TypeError("The 'this' value for String.prototype."+va+" must not be null or undefined");if(xa instanceof RegExp)throw new TypeError("First argument to String.prototype."+va+" must not be a regular 
expression");return u+""}; +$jscomp.defineProperty="function"==typeof Object.defineProperties?Object.defineProperty:function(u,xa,va){if(va.get||va.set)throw new TypeError("ES3 does not support getters and setters.");u!=Array.prototype&&u!=Object.prototype&&(u[xa]=va.value)};$jscomp.getGlobal=function(u){return"undefined"!=typeof window&&window===u?u:"undefined"!=typeof global&&null!=global?global:u};$jscomp.global=$jscomp.getGlobal(this); +$jscomp.polyfill=function(u,xa,va,f){if(xa){va=$jscomp.global;u=u.split(".");for(f=0;fu||1342177279>>=1)va+=va;return f}},"es6-impl","es3");$jscomp.findInternal=function(u,xa,va){u instanceof String&&(u=String(u));for(var f=u.length,qa=0;qa>>=1,a+=a;return g};f.compact=function(a){var g,b;var n=[];var y=0;for(b=a.length;yc)return m.call(this,L,a-1);(w=L[0],0<=y.call(g,w))?c+=1:(l=L[0],0<=y.call(h,l))&&--c;a+=1}return a-1};l.prototype.removeLeadingNewlines=function(){var a,b;var m=this.tokens;var k=a=0;for(b=m.length;ag;f=0<=g?++b:--b){for(;"HERECOMMENT"===this.tag(l+f+c);)c+=2;if(null!=h[f]&&("string"===typeof h[f]&&(h[f]=[h[f]]),k=this.tag(l+f+c),0>y.call(h[f],k)))return-1}return l+f+c-1};l.prototype.looksObjectish=function(a){if(-1y.call(b,w))&&((f=this.tag(a),0>y.call(g,f))||this.tokens[a].generated)&&(n=this.tag(a),0>y.call(R,n)));)(k=this.tag(a),0<=y.call(h,k))&&c.push(this.tag(a)),(l=this.tag(a),0<=y.call(g, +l))&&c.length&&c.pop(),--a;return x=this.tag(a),0<=y.call(b,x)};l.prototype.addImplicitBracesAndParens=function(){var a=[];var l=null;return this.scanTokens(function(c,k,f){var m,w,n,r;var G=c[0];var K=(m=0y.call(h,a):return l[1];case "@"!==this.tag(k-2):return k-2;default:return k-1}}.call(this);"HERECOMMENT"===this.tag(q-2);)q-=2;this.insideForDeclaration="FOR"===u;m=0===q||(r=this.tag(q-1),0<=y.call(R,r))||f[q-1].newLine;if(B()&&(T=B(),r=T[0],v=T[1],("{"===r||"INDENT"===r&&"{"===this.tag(v-1))&&(m||","===this.tag(q-1)||"{"===this.tag(q-1))))return A(1);M(q,!!m);return 
A(2)}if(0<=y.call(R,G))for(M=a.length-1;0<=M;M+=-1)r=a[M],E(r)&&(r[2].sameLine= +!1);M="OUTDENT"===K||m.newLine;if(0<=y.call(x,G)||0<=y.call(z,G)&&M)for(;O();)if(M=B(),r=M[0],v=M[1],m=M[2],M=m.sameLine,m=m.startsLine,C()&&","!==K)S();else if(T()&&!this.insideForDeclaration&&M&&"TERMINATOR"!==G&&":"!==K)q();else if(!T()||"TERMINATOR"!==G||","===K||m&&this.looksObjectish(k+1))break;else{if("HERECOMMENT"===u)return A(1);q()}if(!(","!==G||this.looksObjectish(k+1)||!T()||this.insideForDeclaration||"TERMINATOR"===u&&this.looksObjectish(k+2)))for(u="OUTDENT"===u?1:0;T();)q(k+u);return A(1)})}; +l.prototype.addLocationDataToGeneratedTokens=function(){return this.scanTokens(function(a,b,g){var c,l;if(a[2]||!a.generated&&!a.explicit)return 1;if("{"===a[0]&&(c=null!=(l=g[b+1])?l[2]:void 0)){var m=c.first_line;c=c.first_column}else(c=null!=(m=g[b-1])?m[2]:void 0)?(m=c.last_line,c=c.last_column):m=c=0;a[2]={first_line:m,first_column:c,last_line:m,last_column:c};return 1})};l.prototype.fixOutdentLocationData=function(){return this.scanTokens(function(a,b,g){if(!("OUTDENT"===a[0]||a.generated&& +"CALL_END"===a[0]||a.generated&&"}"===a[0]))return 1;b=g[b-1][2];a[2]={first_line:b.last_line,first_column:b.last_column,last_line:b.last_line,last_column:b.last_column};return 1})};l.prototype.normalizeLines=function(){var b,g;var l=b=g=null;var k=function(a,b){var c,g,k,f;return";"!==a[1]&&(c=a[0],0<=y.call(O,c))&&!("TERMINATOR"===a[0]&&(g=this.tag(b+1),0<=y.call(H,g)))&&!("ELSE"===a[0]&&"THEN"!==l)&&!!("CATCH"!==(k=a[0])&&"FINALLY"!==k||"-\x3e"!==l&&"\x3d\x3e"!==l)||(f=a[0],0<=y.call(z,f))&&(this.tokens[b- +1].newLine||"OUTDENT"===this.tokens[b-1][0])};var f=function(a,b){return this.tokens.splice(","===this.tag(b-1)?b-1:b,0,g)};return this.scanTokens(function(c,m,h){var w,n,r;c=c[0];if("TERMINATOR"===c){if("ELSE"===this.tag(m+1)&&"OUTDENT"!==this.tag(m-1))return h.splice.apply(h,[m,1].concat(a.call(this.indentation()))),1;if(w=this.tag(m+1),0<=y.call(H,w))return 
h.splice(m,1),0}if("CATCH"===c)for(w=n=1;2>=n;w=++n)if("OUTDENT"===(r=this.tag(m+w))||"TERMINATOR"===r||"FINALLY"===r)return h.splice.apply(h, +[m+w,0].concat(a.call(this.indentation()))),2+w;0<=y.call(J,c)&&"INDENT"!==this.tag(m+1)&&("ELSE"!==c||"IF"!==this.tag(m+1))&&(l=c,r=this.indentation(h[m]),b=r[0],g=r[1],"THEN"===l&&(b.fromThen=!0),h.splice(m+1,0,b),this.detectEnd(m+2,k,f),"THEN"===c&&h.splice(m,1));return 1})};l.prototype.tagPostfixConditionals=function(){var a=null;var b=function(a,b){a=a[0];b=this.tokens[b-1][0];return"TERMINATOR"===a||"INDENT"===a&&0>y.call(J,b)};var g=function(b,c){if("INDENT"!==b[0]||b.generated&&!b.fromThen)return a[0]= +"POST_"+a[0]};return this.scanTokens(function(c,l){if("IF"!==c[0])return 1;a=c;this.detectEnd(l+1,b,g);return 1})};l.prototype.indentation=function(a){var b=["INDENT",2];var c=["OUTDENT",2];a?(b.generated=c.generated=!0,b.origin=c.origin=a):b.explicit=c.explicit=!0;return[b,c]};l.prototype.generate=b;l.prototype.tag=function(a){var b;return null!=(b=this.tokens[a])?b[0]:void 0};return l}();var ya=[["(",")"],["[","]"],["{","}"],["INDENT","OUTDENT"],["CALL_START","CALL_END"],["PARAM_START","PARAM_END"], +["INDEX_START","INDEX_END"],["STRING_START","STRING_END"],["REGEX_START","REGEX_END"]];f.INVERSES=u={};var g=[];var h=[];var r=0;for(q=ya.length;rthis.indent){if(c||"RETURN"===this.tag())return this.indebt=b-this.indent,this.suppressNewlines(),a.length;if(!this.tokens.length)return this.baseIndent= +this.indent=b,a.length;c=b-this.indent+this.outdebt;this.token("INDENT",c,a.length-b,b);this.indents.push(c);this.ends.push({tag:"OUTDENT"});this.outdebt=this.indebt=0;this.indent=b}else bl&&(m=this.token("+","+"),m[2]={first_line:w[2].first_line,first_column:w[2].first_column,last_line:w[2].first_line,last_column:w[2].first_column});(f=this.tokens).push.apply(f,r)}if(k)return 
a=a[a.length-1],k.origin=["STRING",null,{first_line:k[2].first_line,first_column:k[2].first_column,last_line:a[2].last_line,last_column:a[2].last_column}],k=this.token("STRING_END",")"),k[2]={first_line:a[2].last_line,first_column:a[2].last_column, +last_line:a[2].last_line,last_column:a[2].last_column}};a.prototype.pair=function(a){var b=this.ends;b=b[b.length-1];return a!==(b=null!=b?b.tag:void 0)?("OUTDENT"!==b&&this.error("unmatched "+a),b=this.indents,b=b[b.length-1],this.outdentToken(b,!0),this.pair(a)):this.ends.pop()};a.prototype.getLineAndColumnFromChunk=function(a){if(0===a)return[this.chunkLine,this.chunkColumn];var b=a>=this.chunk.length?this.chunk:this.chunk.slice(0,+(a-1)+1||9E9);a=g(b,"\n");var c=this.chunkColumn;0a)return b(a);var c=Math.floor((a-65536)/1024)+55296;a=(a-65536)%1024+56320;return""+b(c)+b(a)};a.prototype.replaceUnicodeCodePointEscapes= +function(a,b){return a.replace(sa,function(a){return function(c,g,k,h){if(g)return g;c=parseInt(k,16);1114111q.call(y.call(I).concat(y.call(F)),a):return"keyword '"+b+"' can't be assigned";case 0>q.call(O, +a):return"'"+b+"' can't be assigned";case 0>q.call(J,a):return"reserved word '"+b+"' can't be assigned";default:return!1}};f.isUnassignable=B;var H=function(a){var b;return"IDENTIFIER"===a[0]?("from"===a[1]&&(a[1][0]="IDENTIFIER",!0),!0):"FOR"===a[0]?!1:"{"===(b=a[1])||"["===b||","===b||":"===b?!1:!0};var I="true false null this new delete typeof in instanceof return throw break continue debugger yield if else switch for while do try catch finally class extends super import export default".split(" "); +var F="undefined Infinity NaN then unless until loop of by when".split(" ");var Q={and:"\x26\x26",or:"||",is:"\x3d\x3d",isnt:"!\x3d",not:"!",yes:"true",no:"false",on:"true",off:"false"};var x=function(){var a=[];for(qa in Q)a.push(qa);return a}();F=F.concat(x);var J="case function var void with const let enum native implements interface package private protected public static".split(" ");var 
O=["arguments","eval"];f.JS_FORBIDDEN=I.concat(J).concat(O);var R=65279;var z=/^(?!\d)((?:(?!\s)[$\w\x7f-\uffff])+)([^\n\S]*:(?!:))?/; +var l=/^0b[01]+|^0o[0-7]+|^0x[\da-f]+|^\d*\.?\d+(?:e[+-]?\d+)?/i;var c=/^(?:[-=]>|[-+*\/%<>&|^!?=]=|>>>=?|([-+:])\1|([&|<>*\/%])\2=?|\?(\.|::)|\.{2,3})/;var w=/^[^\n\S]+/;var m=/^###([^#][\s\S]*?)(?:###[^\n\S]*|###$)|^(?:\s*#(?!##[^#]).*)+/;var k=/^[-=]>/;var K=/^(?:\n[^\n\S]*)+/;var P=/^`(?!``)((?:[^`\\]|\\[\s\S])*)`/;var L=/^```((?:[^`\\]|\\[\s\S]|`(?!``))*)```/;var V=/^(?:'''|"""|'|")/;var X=/^(?:[^\\']|\\[\s\S])*/;var G=/^(?:[^\\"#]|\\[\s\S]|\#(?!\{))*/;var aa=/^(?:[^\\']|\\[\s\S]|'(?!''))*/; +var U=/^(?:[^\\"#]|\\[\s\S]|"(?!"")|\#(?!\{))*/;var W=/((?:\\\\)+)|\\[^\S\n]*\n\s*/g;var D=/\s*\n\s*/g;var A=/\n+([^\n\S]*)(?=\S)/g;var fc=/^\/(?!\/)((?:[^[\/\n\\]|\\[^\n]|\[(?:\\[^\n]|[^\]\n\\])*\])*)(\/)?/;var E=/^\w*/;var ba=/^(?!.*(.).*\1)[imguy]*$/;var ca=/^(?:[^\\\/#]|\\[\s\S]|\/(?!\/\/)|\#(?!\{))*/;var C=/((?:\\\\)+)|\\(\s)|\s+(?:#.*)?/g;var T=/^(\/|\/{3}\s*)(\*)/;var v=/^\/=?\s/;var Y=/\*\//;var S=/^\s*(?:,|\??\.(?![.\d])|::)/;var M=/((?:^|[^\\])(?:\\\\)*)\\(?:(0[0-7]|[1-7])|(x(?![\da-fA-F]{2}).{0,2})|(u\{(?![\da-fA-F]{1,}\})[^}]*\}?)|(u(?!\{|[\da-fA-F]{4}).{0,4}))/; +var va=/((?:^|[^\\])(?:\\\\)*)\\(?:(0[0-7])|(x(?![\da-fA-F]{2}).{0,2})|(u\{(?![\da-fA-F]{1,}\})[^}]*\}?)|(u(?!\{|[\da-fA-F]{4}).{0,4}))/;var sa=/(\\\\)|\\u\{([\da-fA-F]+)\}/g;var za=/^[^\n\S]*\n/;var ma=/\n[^\n\S]*$/;var Z=/\s+$/;var fa="-\x3d +\x3d /\x3d *\x3d %\x3d ||\x3d \x26\x26\x3d ?\x3d \x3c\x3c\x3d \x3e\x3e\x3d \x3e\x3e\x3e\x3d \x26\x3d ^\x3d |\x3d **\x3d //\x3d %%\x3d".split(" ");var ia=["NEW","TYPEOF","DELETE","DO"];var ga=["!","~"];var ja=["\x3c\x3c","\x3e\x3e","\x3e\x3e\x3e"];var la="\x3d\x3d !\x3d \x3c \x3e \x3c\x3d \x3e\x3d".split(" "); +var oa=["*","/","%","//","%%"];var pa=["IN","OF","INSTANCEOF"];var ha="IDENTIFIER PROPERTY ) ] ? 
@ THIS SUPER".split(" ");var ka=ha.concat("NUMBER INFINITY NAN STRING STRING_END REGEX REGEX_END BOOL NULL UNDEFINED } ::".split(" "));var na=ka.concat(["++","--"]);var ra=["INDENT","OUTDENT","TERMINATOR"];var da=[")","}","]"]}).call(this);return f}();u["./parser"]=function(){var f={},qa={exports:f},q=function(){function f(){this.yy={}}var a=function(a,p,t,d){t=t||{};for(d=a.length;d--;t[a[d]]=p);return t}, +b=[1,22],u=[1,25],g=[1,83],h=[1,79],r=[1,84],n=[1,85],B=[1,81],H=[1,82],I=[1,56],F=[1,58],Q=[1,59],x=[1,60],J=[1,61],O=[1,62],R=[1,49],z=[1,50],l=[1,32],c=[1,68],w=[1,69],m=[1,78],k=[1,47],K=[1,51],P=[1,52],L=[1,67],V=[1,65],X=[1,66],G=[1,64],aa=[1,42],U=[1,48],W=[1,63],D=[1,73],A=[1,74],q=[1,75],E=[1,76],ba=[1,46],ca=[1,72],C=[1,34],T=[1,35],v=[1,36],Y=[1,37],S=[1,38],M=[1,39],qa=[1,86],sa=[1,6,32,42,131],za=[1,101],ma=[1,89],Z=[1,88],fa=[1,87],ia=[1,90],ga=[1,91],ja=[1,92],la=[1,93],oa=[1,94],pa= +[1,95],ha=[1,96],ka=[1,97],na=[1,98],ra=[1,99],da=[1,100],va=[1,104],N=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],xa=[2,166],ta=[1,110],Na=[1,111],Fa=[1,112],Ga=[1,113],Ca=[1,115],Pa=[1,116],Ia=[1,109],Ea=[1,6,32,42,131,133,135,139,156],Va=[2,27],ea=[1,123],Ya=[1,121],Ba=[1,6,31,32,40,41,42,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172, +173,174],Ha=[2,94],t=[1,6,31,32,42,46,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],p=[2,73],d=[1,128],wa=[1,133],e=[1,134],Da=[1,136],Ta=[1,6,31,32,40,41,42,55,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],ua=[2,91],Eb=[1,6,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168, 
+169,170,171,172,173,174],Za=[2,63],Fb=[1,166],$a=[1,178],Ua=[1,180],Gb=[1,175],Oa=[1,182],sb=[1,184],La=[1,6,31,32,40,41,42,55,65,70,73,82,83,84,85,87,89,90,94,96,113,114,115,120,122,131,133,134,135,139,140,156,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175],Hb=[2,110],Ib=[1,6,31,32,40,41,42,58,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],Jb=[1,6,31,32,40,41,42,46,58,65,70,73,82,83,84, +85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],Kb=[40,41,114],Lb=[1,241],tb=[1,240],Ma=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156],Ja=[2,71],Mb=[1,250],Sa=[6,31,32,65,70],fb=[6,31,32,55,65,70,73],ab=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,164,166,167,168,169,170,171,172,173,174],Nb=[40,41,82,83,84,85,87,90,113,114],gb=[1,269],bb=[2,62],hb=[1,279],Wa=[1,281],ub=[1, +286],cb=[1,288],Ob=[2,187],vb=[1,6,31,32,40,41,42,55,65,70,73,82,83,84,85,87,89,90,94,113,114,115,120,122,131,133,134,135,139,140,146,147,148,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],ib=[1,297],Qa=[6,31,32,70,115,120],Pb=[1,6,31,32,40,41,42,55,58,65,70,73,82,83,84,85,87,89,90,94,96,113,114,115,120,122,131,133,134,135,139,140,146,147,148,156,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175],Qb=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,140,156],Xa=[1,6,31,32, 
+42,65,70,73,89,94,115,120,122,131,134,140,156],jb=[146,147,148],kb=[70,146,147,148],lb=[6,31,94],Rb=[1,311],Aa=[6,31,32,70,94],Sb=[6,31,32,58,70,94],wb=[6,31,32,55,58,70,94],Tb=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,159,160,166,167,168,169,170,171,172,173,174],Ub=[12,28,34,38,40,41,44,45,48,49,50,51,52,53,61,62,63,67,68,89,92,95,97,105,112,117,118,119,125,129,130,133,135,137,139,149,155,157,158,159,160,161,162],Vb=[2,176],Ra=[6,31,32],db=[2,72],Wb=[1,323],Xb=[1,324], +Yb=[1,6,31,32,42,65,70,73,89,94,115,120,122,127,128,131,133,134,135,139,140,151,153,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],mb=[32,151,153],Zb=[1,6,32,42,65,70,73,89,94,115,120,122,131,134,140,156],nb=[1,350],xb=[1,356],yb=[1,6,32,42,131,156],eb=[2,86],ob=[1,367],pb=[1,368],$b=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,151,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],zb=[1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,135,139,140,156],ac= +[1,381],bc=[1,382],Ab=[6,31,32,94],cc=[6,31,32,70],Bb=[1,6,31,32,42,65,70,73,89,94,115,120,122,127,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],dc=[31,70],qb=[1,408],rb=[1,409],Cb=[1,415],Db=[1,416],ec={trace:function(){},yy:{},symbols_:{error:2,Root:3,Body:4,Line:5,TERMINATOR:6,Expression:7,Statement:8,YieldReturn:9,Return:10,Comment:11,STATEMENT:12,Import:13,Export:14,Value:15,Invocation:16,Code:17,Operation:18,Assign:19,If:20,Try:21,While:22,For:23,Switch:24, 
+Class:25,Throw:26,Yield:27,YIELD:28,FROM:29,Block:30,INDENT:31,OUTDENT:32,Identifier:33,IDENTIFIER:34,Property:35,PROPERTY:36,AlphaNumeric:37,NUMBER:38,String:39,STRING:40,STRING_START:41,STRING_END:42,Regex:43,REGEX:44,REGEX_START:45,REGEX_END:46,Literal:47,JS:48,UNDEFINED:49,NULL:50,BOOL:51,INFINITY:52,NAN:53,Assignable:54,"\x3d":55,AssignObj:56,ObjAssignable:57,":":58,SimpleObjAssignable:59,ThisProperty:60,RETURN:61,HERECOMMENT:62,PARAM_START:63,ParamList:64,PARAM_END:65,FuncGlyph:66,"-\x3e":67, +"\x3d\x3e":68,OptComma:69,",":70,Param:71,ParamVar:72,"...":73,Array:74,Object:75,Splat:76,SimpleAssignable:77,Accessor:78,Parenthetical:79,Range:80,This:81,".":82,"?.":83,"::":84,"?::":85,Index:86,INDEX_START:87,IndexValue:88,INDEX_END:89,INDEX_SOAK:90,Slice:91,"{":92,AssignList:93,"}":94,CLASS:95,EXTENDS:96,IMPORT:97,ImportDefaultSpecifier:98,ImportNamespaceSpecifier:99,ImportSpecifierList:100,ImportSpecifier:101,AS:102,DEFAULT:103,IMPORT_ALL:104,EXPORT:105,ExportSpecifierList:106,EXPORT_ALL:107, +ExportSpecifier:108,OptFuncExist:109,Arguments:110,Super:111,SUPER:112,FUNC_EXIST:113,CALL_START:114,CALL_END:115,ArgList:116,THIS:117,"@":118,"[":119,"]":120,RangeDots:121,"..":122,Arg:123,SimpleArgs:124,TRY:125,Catch:126,FINALLY:127,CATCH:128,THROW:129,"(":130,")":131,WhileSource:132,WHILE:133,WHEN:134,UNTIL:135,Loop:136,LOOP:137,ForBody:138,FOR:139,BY:140,ForStart:141,ForSource:142,ForVariables:143,OWN:144,ForValue:145,FORIN:146,FOROF:147,FORFROM:148,SWITCH:149,Whens:150,ELSE:151,When:152,LEADING_WHEN:153, 
+IfBlock:154,IF:155,POST_IF:156,UNARY:157,UNARY_MATH:158,"-":159,"+":160,"--":161,"++":162,"?":163,MATH:164,"**":165,SHIFT:166,COMPARE:167,"\x26":168,"^":169,"|":170,"\x26\x26":171,"||":172,"BIN?":173,RELATION:174,COMPOUND_ASSIGN:175,$accept:0,$end:1},terminals_:{2:"error",6:"TERMINATOR",12:"STATEMENT",28:"YIELD",29:"FROM",31:"INDENT",32:"OUTDENT",34:"IDENTIFIER",36:"PROPERTY",38:"NUMBER",40:"STRING",41:"STRING_START",42:"STRING_END",44:"REGEX",45:"REGEX_START",46:"REGEX_END",48:"JS",49:"UNDEFINED", +50:"NULL",51:"BOOL",52:"INFINITY",53:"NAN",55:"\x3d",58:":",61:"RETURN",62:"HERECOMMENT",63:"PARAM_START",65:"PARAM_END",67:"-\x3e",68:"\x3d\x3e",70:",",73:"...",82:".",83:"?.",84:"::",85:"?::",87:"INDEX_START",89:"INDEX_END",90:"INDEX_SOAK",92:"{",94:"}",95:"CLASS",96:"EXTENDS",97:"IMPORT",102:"AS",103:"DEFAULT",104:"IMPORT_ALL",105:"EXPORT",107:"EXPORT_ALL",112:"SUPER",113:"FUNC_EXIST",114:"CALL_START",115:"CALL_END",117:"THIS",118:"@",119:"[",120:"]",122:"..",125:"TRY",127:"FINALLY",128:"CATCH", +129:"THROW",130:"(",131:")",133:"WHILE",134:"WHEN",135:"UNTIL",137:"LOOP",139:"FOR",140:"BY",144:"OWN",146:"FORIN",147:"FOROF",148:"FORFROM",149:"SWITCH",151:"ELSE",153:"LEADING_WHEN",155:"IF",156:"POST_IF",157:"UNARY",158:"UNARY_MATH",159:"-",160:"+",161:"--",162:"++",163:"?",164:"MATH",165:"**",166:"SHIFT",167:"COMPARE",168:"\x26",169:"^",170:"|",171:"\x26\x26",172:"||",173:"BIN?",174:"RELATION",175:"COMPOUND_ASSIGN"},productions_:[0,[3,0],[3,1],[4,1],[4,3],[4,2],[5,1],[5,1],[5,1],[8,1],[8,1],[8, 
+1],[8,1],[8,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[7,1],[27,1],[27,2],[27,3],[30,2],[30,3],[33,1],[35,1],[37,1],[37,1],[39,1],[39,3],[43,1],[43,3],[47,1],[47,1],[47,1],[47,1],[47,1],[47,1],[47,1],[47,1],[19,3],[19,4],[19,5],[56,1],[56,3],[56,5],[56,3],[56,5],[56,1],[59,1],[59,1],[59,1],[57,1],[57,1],[10,2],[10,1],[9,3],[9,2],[11,1],[17,5],[17,2],[66,1],[66,1],[69,0],[69,1],[64,0],[64,1],[64,3],[64,4],[64,6],[71,1],[71,2],[71,3],[71,1],[72,1],[72,1],[72,1],[72, +1],[76,2],[77,1],[77,2],[77,2],[77,1],[54,1],[54,1],[54,1],[15,1],[15,1],[15,1],[15,1],[15,1],[78,2],[78,2],[78,2],[78,2],[78,1],[78,1],[86,3],[86,2],[88,1],[88,1],[75,4],[93,0],[93,1],[93,3],[93,4],[93,6],[25,1],[25,2],[25,3],[25,4],[25,2],[25,3],[25,4],[25,5],[13,2],[13,4],[13,4],[13,5],[13,7],[13,6],[13,9],[100,1],[100,3],[100,4],[100,4],[100,6],[101,1],[101,3],[101,1],[101,3],[98,1],[99,3],[14,3],[14,5],[14,2],[14,4],[14,5],[14,6],[14,3],[14,4],[14,7],[106,1],[106,3],[106,4],[106,4],[106,6],[108, +1],[108,3],[108,3],[108,1],[108,3],[16,3],[16,3],[16,3],[16,1],[111,1],[111,2],[109,0],[109,1],[110,2],[110,4],[81,1],[81,1],[60,2],[74,2],[74,4],[121,1],[121,1],[80,5],[91,3],[91,2],[91,2],[91,1],[116,1],[116,3],[116,4],[116,4],[116,6],[123,1],[123,1],[123,1],[124,1],[124,3],[21,2],[21,3],[21,4],[21,5],[126,3],[126,3],[126,2],[26,2],[79,3],[79,5],[132,2],[132,4],[132,2],[132,4],[22,2],[22,2],[22,2],[22,1],[136,2],[136,2],[23,2],[23,2],[23,2],[138,2],[138,4],[138,2],[141,2],[141,3],[145,1],[145,1], +[145,1],[145,1],[143,1],[143,3],[142,2],[142,2],[142,4],[142,4],[142,4],[142,6],[142,6],[142,2],[142,4],[24,5],[24,7],[24,4],[24,6],[150,1],[150,2],[152,3],[152,4],[154,3],[154,5],[20,1],[20,3],[20,3],[20,3],[18,2],[18,2],[18,2],[18,2],[18,2],[18,2],[18,2],[18,2],[18,2],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,3],[18,5],[18,4],[18,3]],performAction:function(a,p,t,d,wa,b,e){a=b.length-1;switch(wa){case 1:return 
this.$=d.addLocationDataFn(e[a],e[a])(new d.Block); +case 2:return this.$=b[a];case 3:this.$=d.addLocationDataFn(e[a],e[a])(d.Block.wrap([b[a]]));break;case 4:this.$=d.addLocationDataFn(e[a-2],e[a])(b[a-2].push(b[a]));break;case 5:this.$=b[a-1];break;case 6:case 7:case 8:case 9:case 10:case 12:case 13:case 14:case 15:case 16:case 17:case 18:case 19:case 20:case 21:case 22:case 23:case 24:case 25:case 26:case 35:case 40:case 42:case 56:case 57:case 58:case 59:case 60:case 61:case 71:case 72:case 82:case 83:case 84:case 85:case 90:case 91:case 94:case 98:case 104:case 163:case 187:case 188:case 190:case 220:case 221:case 239:case 245:this.$= +b[a];break;case 11:this.$=d.addLocationDataFn(e[a],e[a])(new d.StatementLiteral(b[a]));break;case 27:this.$=d.addLocationDataFn(e[a],e[a])(new d.Op(b[a],new d.Value(new d.Literal(""))));break;case 28:case 249:case 250:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op(b[a-1],b[a]));break;case 29:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Op(b[a-2].concat(b[a-1]),b[a]));break;case 30:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Block);break;case 31:case 105:this.$=d.addLocationDataFn(e[a-2],e[a])(b[a- +1]);break;case 32:this.$=d.addLocationDataFn(e[a],e[a])(new d.IdentifierLiteral(b[a]));break;case 33:this.$=d.addLocationDataFn(e[a],e[a])(new d.PropertyName(b[a]));break;case 34:this.$=d.addLocationDataFn(e[a],e[a])(new d.NumberLiteral(b[a]));break;case 36:this.$=d.addLocationDataFn(e[a],e[a])(new d.StringLiteral(b[a]));break;case 37:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.StringWithInterpolations(b[a-1]));break;case 38:this.$=d.addLocationDataFn(e[a],e[a])(new d.RegexLiteral(b[a]));break; +case 39:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.RegexWithInterpolations(b[a-1].args));break;case 41:this.$=d.addLocationDataFn(e[a],e[a])(new d.PassthroughLiteral(b[a]));break;case 43:this.$=d.addLocationDataFn(e[a],e[a])(new d.UndefinedLiteral);break;case 44:this.$=d.addLocationDataFn(e[a],e[a])(new d.NullLiteral);break;case 
45:this.$=d.addLocationDataFn(e[a],e[a])(new d.BooleanLiteral(b[a]));break;case 46:this.$=d.addLocationDataFn(e[a],e[a])(new d.InfinityLiteral(b[a]));break;case 47:this.$= +d.addLocationDataFn(e[a],e[a])(new d.NaNLiteral);break;case 48:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Assign(b[a-2],b[a]));break;case 49:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Assign(b[a-3],b[a]));break;case 50:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Assign(b[a-4],b[a-1]));break;case 51:case 87:case 92:case 93:case 95:case 96:case 97:case 222:case 223:this.$=d.addLocationDataFn(e[a],e[a])(new d.Value(b[a]));break;case 52:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Assign(d.addLocationDataFn(e[a- +2])(new d.Value(b[a-2])),b[a],"object",{operatorToken:d.addLocationDataFn(e[a-1])(new d.Literal(b[a-1]))}));break;case 53:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Assign(d.addLocationDataFn(e[a-4])(new d.Value(b[a-4])),b[a-1],"object",{operatorToken:d.addLocationDataFn(e[a-3])(new d.Literal(b[a-3]))}));break;case 54:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Assign(d.addLocationDataFn(e[a-2])(new d.Value(b[a-2])),b[a],null,{operatorToken:d.addLocationDataFn(e[a-1])(new d.Literal(b[a-1]))})); +break;case 55:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Assign(d.addLocationDataFn(e[a-4])(new d.Value(b[a-4])),b[a-1],null,{operatorToken:d.addLocationDataFn(e[a-3])(new d.Literal(b[a-3]))}));break;case 62:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Return(b[a]));break;case 63:this.$=d.addLocationDataFn(e[a],e[a])(new d.Return);break;case 64:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.YieldReturn(b[a]));break;case 65:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.YieldReturn);break;case 66:this.$= +d.addLocationDataFn(e[a],e[a])(new d.Comment(b[a]));break;case 67:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Code(b[a-3],b[a],b[a-1]));break;case 68:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Code([],b[a],b[a-1]));break;case 
69:this.$=d.addLocationDataFn(e[a],e[a])("func");break;case 70:this.$=d.addLocationDataFn(e[a],e[a])("boundfunc");break;case 73:case 110:this.$=d.addLocationDataFn(e[a],e[a])([]);break;case 74:case 111:case 130:case 150:case 182:case 224:this.$=d.addLocationDataFn(e[a], +e[a])([b[a]]);break;case 75:case 112:case 131:case 151:case 183:this.$=d.addLocationDataFn(e[a-2],e[a])(b[a-2].concat(b[a]));break;case 76:case 113:case 132:case 152:case 184:this.$=d.addLocationDataFn(e[a-3],e[a])(b[a-3].concat(b[a]));break;case 77:case 114:case 134:case 154:case 186:this.$=d.addLocationDataFn(e[a-5],e[a])(b[a-5].concat(b[a-2]));break;case 78:this.$=d.addLocationDataFn(e[a],e[a])(new d.Param(b[a]));break;case 79:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Param(b[a-1],null,!0)); +break;case 80:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Param(b[a-2],b[a]));break;case 81:case 189:this.$=d.addLocationDataFn(e[a],e[a])(new d.Expansion);break;case 86:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Splat(b[a-1]));break;case 88:this.$=d.addLocationDataFn(e[a-1],e[a])(b[a-1].add(b[a]));break;case 89:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Value(b[a-1],[].concat(b[a])));break;case 99:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Access(b[a]));break;case 100:this.$=d.addLocationDataFn(e[a- +1],e[a])(new d.Access(b[a],"soak"));break;case 101:this.$=d.addLocationDataFn(e[a-1],e[a])([d.addLocationDataFn(e[a-1])(new d.Access(new d.PropertyName("prototype"))),d.addLocationDataFn(e[a])(new d.Access(b[a]))]);break;case 102:this.$=d.addLocationDataFn(e[a-1],e[a])([d.addLocationDataFn(e[a-1])(new d.Access(new d.PropertyName("prototype"),"soak")),d.addLocationDataFn(e[a])(new d.Access(b[a]))]);break;case 103:this.$=d.addLocationDataFn(e[a],e[a])(new d.Access(new d.PropertyName("prototype"))); +break;case 106:this.$=d.addLocationDataFn(e[a-1],e[a])(d.extend(b[a],{soak:!0}));break;case 107:this.$=d.addLocationDataFn(e[a],e[a])(new d.Index(b[a]));break;case 
108:this.$=d.addLocationDataFn(e[a],e[a])(new d.Slice(b[a]));break;case 109:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Obj(b[a-2],b[a-3].generated));break;case 115:this.$=d.addLocationDataFn(e[a],e[a])(new d.Class);break;case 116:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Class(null,null,b[a]));break;case 117:this.$=d.addLocationDataFn(e[a- +2],e[a])(new d.Class(null,b[a]));break;case 118:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Class(null,b[a-1],b[a]));break;case 119:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Class(b[a]));break;case 120:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Class(b[a-1],null,b[a]));break;case 121:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Class(b[a-2],b[a]));break;case 122:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Class(b[a-3],b[a-1],b[a]));break;case 123:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.ImportDeclaration(null, +b[a]));break;case 124:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.ImportDeclaration(new d.ImportClause(b[a-2],null),b[a]));break;case 125:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.ImportDeclaration(new d.ImportClause(null,b[a-2]),b[a]));break;case 126:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.ImportDeclaration(new d.ImportClause(null,new d.ImportSpecifierList([])),b[a]));break;case 127:this.$=d.addLocationDataFn(e[a-6],e[a])(new d.ImportDeclaration(new d.ImportClause(null,new d.ImportSpecifierList(b[a- +4])),b[a]));break;case 128:this.$=d.addLocationDataFn(e[a-5],e[a])(new d.ImportDeclaration(new d.ImportClause(b[a-4],b[a-2]),b[a]));break;case 129:this.$=d.addLocationDataFn(e[a-8],e[a])(new d.ImportDeclaration(new d.ImportClause(b[a-7],new d.ImportSpecifierList(b[a-4])),b[a]));break;case 133:case 153:case 169:case 185:this.$=d.addLocationDataFn(e[a-3],e[a])(b[a-2]);break;case 135:this.$=d.addLocationDataFn(e[a],e[a])(new d.ImportSpecifier(b[a]));break;case 136:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ImportSpecifier(b[a- +2],b[a]));break;case 
137:this.$=d.addLocationDataFn(e[a],e[a])(new d.ImportSpecifier(new d.Literal(b[a])));break;case 138:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ImportSpecifier(new d.Literal(b[a-2]),b[a]));break;case 139:this.$=d.addLocationDataFn(e[a],e[a])(new d.ImportDefaultSpecifier(b[a]));break;case 140:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ImportNamespaceSpecifier(new d.Literal(b[a-2]),b[a]));break;case 141:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ExportNamedDeclaration(new d.ExportSpecifierList([]))); +break;case 142:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.ExportNamedDeclaration(new d.ExportSpecifierList(b[a-2])));break;case 143:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.ExportNamedDeclaration(b[a]));break;case 144:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.ExportNamedDeclaration(new d.Assign(b[a-2],b[a],null,{moduleDeclaration:"export"})));break;case 145:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.ExportNamedDeclaration(new d.Assign(b[a-3],b[a],null,{moduleDeclaration:"export"}))); +break;case 146:this.$=d.addLocationDataFn(e[a-5],e[a])(new d.ExportNamedDeclaration(new d.Assign(b[a-4],b[a-1],null,{moduleDeclaration:"export"})));break;case 147:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ExportDefaultDeclaration(b[a]));break;case 148:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.ExportAllDeclaration(new d.Literal(b[a-2]),b[a]));break;case 149:this.$=d.addLocationDataFn(e[a-6],e[a])(new d.ExportNamedDeclaration(new d.ExportSpecifierList(b[a-4]),b[a]));break;case 155:this.$=d.addLocationDataFn(e[a], +e[a])(new d.ExportSpecifier(b[a]));break;case 156:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ExportSpecifier(b[a-2],b[a]));break;case 157:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ExportSpecifier(b[a-2],new d.Literal(b[a])));break;case 158:this.$=d.addLocationDataFn(e[a],e[a])(new d.ExportSpecifier(new d.Literal(b[a])));break;case 159:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.ExportSpecifier(new d.Literal(b[a-2]),b[a]));break;case 
160:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.TaggedTemplateCall(b[a- +2],b[a],b[a-1]));break;case 161:case 162:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Call(b[a-2],b[a],b[a-1]));break;case 164:this.$=d.addLocationDataFn(e[a],e[a])(new d.SuperCall);break;case 165:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.SuperCall(b[a]));break;case 166:this.$=d.addLocationDataFn(e[a],e[a])(!1);break;case 167:this.$=d.addLocationDataFn(e[a],e[a])(!0);break;case 168:this.$=d.addLocationDataFn(e[a-1],e[a])([]);break;case 170:case 171:this.$=d.addLocationDataFn(e[a],e[a])(new d.Value(new d.ThisLiteral)); +break;case 172:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Value(d.addLocationDataFn(e[a-1])(new d.ThisLiteral),[d.addLocationDataFn(e[a])(new d.Access(b[a]))],"this"));break;case 173:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Arr([]));break;case 174:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Arr(b[a-2]));break;case 175:this.$=d.addLocationDataFn(e[a],e[a])("inclusive");break;case 176:this.$=d.addLocationDataFn(e[a],e[a])("exclusive");break;case 177:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Range(b[a- +3],b[a-1],b[a-2]));break;case 178:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Range(b[a-2],b[a],b[a-1]));break;case 179:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Range(b[a-1],null,b[a]));break;case 180:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Range(null,b[a],b[a-1]));break;case 181:this.$=d.addLocationDataFn(e[a],e[a])(new d.Range(null,null,b[a]));break;case 191:this.$=d.addLocationDataFn(e[a-2],e[a])([].concat(b[a-2],b[a]));break;case 192:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Try(b[a])); +break;case 193:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Try(b[a-1],b[a][0],b[a][1]));break;case 194:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Try(b[a-2],null,null,b[a]));break;case 195:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Try(b[a-3],b[a-2][0],b[a-2][1],b[a]));break;case 196:this.$=d.addLocationDataFn(e[a-2],e[a])([b[a-1],b[a]]);break;case 
197:this.$=d.addLocationDataFn(e[a-2],e[a])([d.addLocationDataFn(e[a-1])(new d.Value(b[a-1])),b[a]]);break;case 198:this.$=d.addLocationDataFn(e[a- +1],e[a])([null,b[a]]);break;case 199:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Throw(b[a]));break;case 200:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Parens(b[a-1]));break;case 201:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Parens(b[a-2]));break;case 202:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.While(b[a]));break;case 203:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.While(b[a-2],{guard:b[a]}));break;case 204:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.While(b[a],{invert:!0}));break; +case 205:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.While(b[a-2],{invert:!0,guard:b[a]}));break;case 206:this.$=d.addLocationDataFn(e[a-1],e[a])(b[a-1].addBody(b[a]));break;case 207:case 208:this.$=d.addLocationDataFn(e[a-1],e[a])(b[a].addBody(d.addLocationDataFn(e[a-1])(d.Block.wrap([b[a-1]]))));break;case 209:this.$=d.addLocationDataFn(e[a],e[a])(b[a]);break;case 210:this.$=d.addLocationDataFn(e[a-1],e[a])((new d.While(d.addLocationDataFn(e[a-1])(new d.BooleanLiteral("true")))).addBody(b[a])); +break;case 211:this.$=d.addLocationDataFn(e[a-1],e[a])((new d.While(d.addLocationDataFn(e[a-1])(new d.BooleanLiteral("true")))).addBody(d.addLocationDataFn(e[a])(d.Block.wrap([b[a]]))));break;case 212:case 213:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.For(b[a-1],b[a]));break;case 214:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.For(b[a],b[a-1]));break;case 215:this.$=d.addLocationDataFn(e[a-1],e[a])({source:d.addLocationDataFn(e[a])(new d.Value(b[a]))});break;case 216:this.$=d.addLocationDataFn(e[a- +3],e[a])({source:d.addLocationDataFn(e[a-2])(new d.Value(b[a-2])),step:b[a]});break;case 217:d=d.addLocationDataFn(e[a-1],e[a]);b[a].own=b[a-1].own;b[a].ownTag=b[a-1].ownTag;b[a].name=b[a-1][0];b[a].index=b[a-1][1];this.$=d(b[a]);break;case 218:this.$=d.addLocationDataFn(e[a-1],e[a])(b[a]);break;case 
219:wa=d.addLocationDataFn(e[a-2],e[a]);b[a].own=!0;b[a].ownTag=d.addLocationDataFn(e[a-1])(new d.Literal(b[a-1]));this.$=wa(b[a]);break;case 225:this.$=d.addLocationDataFn(e[a-2],e[a])([b[a-2],b[a]]); +break;case 226:this.$=d.addLocationDataFn(e[a-1],e[a])({source:b[a]});break;case 227:this.$=d.addLocationDataFn(e[a-1],e[a])({source:b[a],object:!0});break;case 228:this.$=d.addLocationDataFn(e[a-3],e[a])({source:b[a-2],guard:b[a]});break;case 229:this.$=d.addLocationDataFn(e[a-3],e[a])({source:b[a-2],guard:b[a],object:!0});break;case 230:this.$=d.addLocationDataFn(e[a-3],e[a])({source:b[a-2],step:b[a]});break;case 231:this.$=d.addLocationDataFn(e[a-5],e[a])({source:b[a-4],guard:b[a-2],step:b[a]}); +break;case 232:this.$=d.addLocationDataFn(e[a-5],e[a])({source:b[a-4],step:b[a-2],guard:b[a]});break;case 233:this.$=d.addLocationDataFn(e[a-1],e[a])({source:b[a],from:!0});break;case 234:this.$=d.addLocationDataFn(e[a-3],e[a])({source:b[a-2],guard:b[a],from:!0});break;case 235:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Switch(b[a-3],b[a-1]));break;case 236:this.$=d.addLocationDataFn(e[a-6],e[a])(new d.Switch(b[a-5],b[a-3],b[a-1]));break;case 237:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Switch(null, +b[a-1]));break;case 238:this.$=d.addLocationDataFn(e[a-5],e[a])(new d.Switch(null,b[a-3],b[a-1]));break;case 240:this.$=d.addLocationDataFn(e[a-1],e[a])(b[a-1].concat(b[a]));break;case 241:this.$=d.addLocationDataFn(e[a-2],e[a])([[b[a-1],b[a]]]);break;case 242:this.$=d.addLocationDataFn(e[a-3],e[a])([[b[a-2],b[a-1]]]);break;case 243:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.If(b[a-1],b[a],{type:b[a-2]}));break;case 244:this.$=d.addLocationDataFn(e[a-4],e[a])(b[a-4].addElse(d.addLocationDataFn(e[a- +2],e[a])(new d.If(b[a-1],b[a],{type:b[a-2]}))));break;case 246:this.$=d.addLocationDataFn(e[a-2],e[a])(b[a-2].addElse(b[a]));break;case 247:case 248:this.$=d.addLocationDataFn(e[a-2],e[a])(new 
d.If(b[a],d.addLocationDataFn(e[a-2])(d.Block.wrap([b[a-2]])),{type:b[a-1],statement:!0}));break;case 251:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("-",b[a]));break;case 252:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("+",b[a]));break;case 253:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("--", +b[a]));break;case 254:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("++",b[a]));break;case 255:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("--",b[a-1],null,!0));break;case 256:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Op("++",b[a-1],null,!0));break;case 257:this.$=d.addLocationDataFn(e[a-1],e[a])(new d.Existence(b[a-1]));break;case 258:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Op("+",b[a-2],b[a]));break;case 259:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Op("-",b[a-2],b[a]));break; +case 260:case 261:case 262:case 263:case 264:case 265:case 266:case 267:case 268:case 269:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Op(b[a-1],b[a-2],b[a]));break;case 270:e=d.addLocationDataFn(e[a-2],e[a]);b="!"===b[a-1].charAt(0)?(new d.Op(b[a-1].slice(1),b[a-2],b[a])).invert():new d.Op(b[a-1],b[a-2],b[a]);this.$=e(b);break;case 271:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Assign(b[a-2],b[a],b[a-1]));break;case 272:this.$=d.addLocationDataFn(e[a-4],e[a])(new d.Assign(b[a-4],b[a-1],b[a-3])); +break;case 273:this.$=d.addLocationDataFn(e[a-3],e[a])(new d.Assign(b[a-3],b[a],b[a-2]));break;case 274:this.$=d.addLocationDataFn(e[a-2],e[a])(new d.Extends(b[a-2],b[a]))}},table:[{1:[2,1],3:1,4:2,5:3,7:4,8:5,9:6,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k, 
+97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{1:[3]},{1:[2,2],6:qa},a(sa,[2,3]),a(sa,[2,6],{141:77,132:102,138:103,133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(sa,[2,7],{141:77,132:105,138:106,133:D,135:A,139:E,156:va}),a(sa,[2,8]),a(N,[2,14],{109:107,78:108,86:114,40:xa,41:xa,114:xa,82:ta,83:Na, +84:Fa,85:Ga,87:Ca,90:Pa,113:Ia}),a(N,[2,15],{86:114,109:117,78:118,82:ta,83:Na,84:Fa,85:Ga,87:Ca,90:Pa,113:Ia,114:xa}),a(N,[2,16]),a(N,[2,17]),a(N,[2,18]),a(N,[2,19]),a(N,[2,20]),a(N,[2,21]),a(N,[2,22]),a(N,[2,23]),a(N,[2,24]),a(N,[2,25]),a(N,[2,26]),a(Ea,[2,9]),a(Ea,[2,10]),a(Ea,[2,11]),a(Ea,[2,12]),a(Ea,[2,13]),a([1,6,32,42,131,133,135,139,156,163,164,165,166,167,168,169,170,171,172,173,174],Va,{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26, +47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,7:120,8:122,12:b,28:ea,29:Ya,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:[1,119],62:z,63:l,67:c,68:w,92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,137:q,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),a(Ba,Ha,{55:[1,124]}),a(Ba,[2,95]),a(Ba,[2,96]),a(Ba,[2,97]),a(Ba,[2,98]),a(t,[2,163]),a([6,31,65,70],p,{64:125,71:126,72:127,33:129,60:130, 
+74:131,75:132,34:g,73:d,92:m,118:wa,119:e}),{30:135,31:Da},{7:137,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C, +158:T,159:v,160:Y,161:S,162:M},{7:138,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}, +{7:139,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:140,8:122,10:20,11:21,12:b, 
+13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{15:142,16:143,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57, +44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:144,60:71,74:53,75:54,77:141,79:28,80:29,81:30,92:m,111:31,112:L,117:V,118:X,119:G,130:W},{15:142,16:143,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:144,60:71,74:53,75:54,77:145,79:28,80:29,81:30,92:m,111:31,112:L,117:V,118:X,119:G,130:W},a(Ta,ua,{96:[1,149],161:[1,146],162:[1,147],175:[1,148]}),a(N,[2,245],{151:[1,150]}),{30:151,31:Da},{30:152,31:Da},a(N,[2,209]),{30:153,31:Da},{7:154,8:122,10:20,11:21, +12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,155],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Eb,[2,115],{47:27,79:28,80:29,81:30,111:31, 
+74:53,75:54,37:55,43:57,33:70,60:71,39:80,15:142,16:143,54:144,30:156,77:158,31:Da,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,92:m,96:[1,157],112:L,117:V,118:X,119:G,130:W}),{7:159,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P, +111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Ea,Za,{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,8:122,7:160,12:b,28:ea,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:R,62:z,63:l,67:c,68:w, +92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,137:q,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),a([1,6,31,32,42,70,94,131,133,135,139,156],[2,66]),{33:165,34:g,39:161,40:r,41:n,92:[1,164],98:162,99:163,104:Fb},{25:168,33:169,34:g,92:[1,167],95:k,103:[1,170],107:[1,171]},a(Ta,[2,92]),a(Ta,[2,93]),a(Ba,[2,40]),a(Ba,[2,41]),a(Ba,[2,42]),a(Ba,[2,43]),a(Ba,[2,44]),a(Ba,[2,45]),a(Ba,[2,46]),a(Ba,[2,47]),{4:172,5:3,7:4,8:5,9:6,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11, 
+20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,31:[1,173],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:174,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14, +23:15,24:16,25:17,26:18,27:19,28:ea,31:$a,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,116:176,117:V,118:X,119:G,120:Gb,123:177,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Ba,[2,170]),a(Ba,[2,171],{35:181,36:Oa}),a([1,6,31,32,42,46,65,70,73,82, +83,84,85,87,89,90,94,113,115,120,122,131,133,134,135,139,140,156,159,160,163,164,165,166,167,168,169,170,171,172,173,174],[2,164],{110:183,114:sb}),{31:[2,69]},{31:[2,70]},a(La,[2,87]),a(La,[2,90]),{7:185,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K, 
+105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:186,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X, +119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:187,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43, +133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:189,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,30:188,31:Da,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44, 
+137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{33:194,34:g,60:195,74:196,75:197,80:190,92:m,118:wa,119:G,143:191,144:[1,192],145:193},{142:198,146:[1,199],147:[1,200],148:[1,201]},a([6,31,70,94],Hb,{39:80,93:202,56:203,57:204,59:205,11:206,37:207,33:208,35:209,60:210,34:g,36:Oa,38:h,40:r,41:n,62:z,118:wa}),a(Ib,[2,34]),a(Ib,[2,35]),a(Ba,[2,38]),{15:142,16:211,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:144,60:71, +74:53,75:54,77:212,79:28,80:29,81:30,92:m,111:31,112:L,117:V,118:X,119:G,130:W},a([1,6,29,31,32,40,41,42,55,58,65,70,73,82,83,84,85,87,89,90,94,96,102,113,114,115,120,122,131,133,134,135,139,140,146,147,148,156,159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,175],[2,32]),a(Jb,[2,36]),{4:213,5:3,7:4,8:5,9:6,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F, +50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(sa,[2,5],{7:4,8:5,9:6,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57, 
+33:70,60:71,141:77,39:80,5:214,12:b,28:u,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:R,62:z,63:l,67:c,68:w,92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,133:D,135:A,137:q,139:E,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),a(N,[2,257]),{7:215,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71, +61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:216,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w, +74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:217,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29, 
+81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:218,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31, +112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:219,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa, +129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:220,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A, 
+136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:221,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77, +149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:222,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T, +159:v,160:Y,161:S,162:M},{7:223,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:224, 
+8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:225,8:122,10:20,11:21,12:b,13:23, +14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:226,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11, +20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:227,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16, 
+25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:228,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70, +34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(N,[2,208]),a(N,[2,213]),{7:229,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g, +37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(N,[2,207]),a(N,[2,212]),{39:230,40:r,41:n,110:231,114:sb},a(La,[2,88]),a(Kb,[2,167]),{35:232,36:Oa},{35:233,36:Oa},a(La,[2,103],{35:234,36:Oa}),{35:235,36:Oa},a(La, 
+[2,104]),{7:237,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Lb,74:53,75:54,77:40,79:28,80:29,81:30,88:236,91:238,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,121:239,122:tb,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y, +161:S,162:M},{86:242,87:Ca,90:Pa},{110:243,114:sb},a(La,[2,89]),a(sa,[2,65],{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,8:122,7:244,12:b,28:ea,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:R,62:z,63:l,67:c,68:w,92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,133:Za,135:Za,139:Za,156:Za, +137:q,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),a(Ma,[2,28],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:245,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P, 
+111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{132:105,133:D,135:A,138:106,139:E,141:77,156:va},a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,163,164,165,166,167,168,169,170,171,172,173,174],Va,{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43,136:44, +138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,7:120,8:122,12:b,28:ea,29:Ya,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:R,62:z,63:l,67:c,68:w,92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,137:q,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),{6:[1,247],7:246,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,248],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I, +49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a([6,31],Ja,{69:251,65:[1,249],70:Mb}),a(Sa,[2,74]),a(Sa,[2,78],{55:[1,253],73:[1,252]}),a(Sa,[2,81]),a(fb,[2,82]),a(fb,[2,83]),a(fb,[2,84]),a(fb,[2,85]),{35:181,36:Oa},{7:254,8:122,10:20,11:21,12:b,13:23,14:24,15:7, 
+16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:$a,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,116:176,117:V,118:X,119:G,120:Gb,123:177,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(N,[2,68]),{4:256,5:3,7:4,8:5,9:6, +10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,32:[1,255],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a([1,6,31,32,42,65,70,73,89,94, +115,120,122,131,133,134,135,139,140,156,159,160,164,165,166,167,168,169,170,171,172,173,174],[2,249],{141:77,132:102,138:103,163:fa}),a(ab,[2,250],{141:77,132:102,138:103,163:fa,165:ga}),a(ab,[2,251],{141:77,132:102,138:103,163:fa,165:ga}),a(ab,[2,252],{141:77,132:102,138:103,163:fa,165:ga}),a(N,[2,253],{40:ua,41:ua,82:ua,83:ua,84:ua,85:ua,87:ua,90:ua,113:ua,114:ua}),a(Kb,xa,{109:107,78:108,86:114,82:ta,83:Na,84:Fa,85:Ga,87:Ca,90:Pa,113:Ia}),{78:118,82:ta,83:Na,84:Fa,85:Ga,86:114,87:Ca,90:Pa,109:117, 
+113:Ia,114:xa},a(Nb,Ha),a(N,[2,254],{40:ua,41:ua,82:ua,83:ua,84:ua,85:ua,87:ua,90:ua,113:ua,114:ua}),a(N,[2,255]),a(N,[2,256]),{6:[1,259],7:257,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,258],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U, +130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:260,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44, +137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{30:261,31:Da,155:[1,262]},a(N,[2,192],{126:263,127:[1,264],128:[1,265]}),a(N,[2,206]),a(N,[2,214]),{31:[1,266],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{150:267,152:268,153:gb},a(N,[2,116]),{7:270,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea, 
+33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Eb,[2,119],{30:271,31:Da,40:ua,41:ua,82:ua,83:ua,84:ua,85:ua,87:ua,90:ua,113:ua,114:ua,96:[1,272]}),a(Ma,[2,199],{141:77,132:102,138:103,159:ma,160:Z, +163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ea,bb,{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ea,[2,123]),{29:[1,273],70:[1,274]},{29:[1,275]},{31:hb,33:280,34:g,94:[1,276],100:277,101:278,103:Wa},a([29,70],[2,139]),{102:[1,282]},{31:ub,33:287,34:g,94:[1,283],103:cb,106:284,108:285},a(Ea,[2,143]),{55:[1,289]},{7:290,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11, +20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{29:[1,291]},{6:qa,131:[1,292]},{4:293,5:3,7:4,8:5,9:6,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9, 
+18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:u,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a([6,31,70,120],Ob,{141:77,132:102,138:103,121:294,73:[1,295],122:tb,133:D,135:A,139:E, +156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(vb,[2,173]),a([6,31,120],Ja,{69:296,70:ib}),a(Qa,[2,182]),{7:254,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:$a,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31, +112:L,116:298,117:V,118:X,119:G,123:177,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Qa,[2,188]),a(Qa,[2,189]),a(Pb,[2,172]),a(Pb,[2,33]),a(t,[2,165]),{7:254,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:$a,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua, 
+74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,115:[1,299],116:300,117:V,118:X,119:G,123:177,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{30:301,31:Da,132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},a(Qb,[2,202],{141:77,132:102,138:103,133:D,134:[1,302],135:A,139:E,159:ma,160:Z,163:fa,164:ia, +165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Qb,[2,204],{141:77,132:102,138:103,133:D,134:[1,303],135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(N,[2,210]),a(Xa,[2,211],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,156,159,160,163,164,165,166,167,168, +169,170,171,172,173,174],[2,215],{140:[1,304]}),a(jb,[2,218]),{33:194,34:g,60:195,74:196,75:197,92:m,118:wa,119:e,143:305,145:193},a(jb,[2,224],{70:[1,306]}),a(kb,[2,220]),a(kb,[2,221]),a(kb,[2,222]),a(kb,[2,223]),a(N,[2,217]),{7:307,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40, 
+79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:308,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k, +97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:309,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V, +118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(lb,Ja,{69:310,70:Rb}),a(Aa,[2,111]),a(Aa,[2,51],{58:[1,312]}),a(Sb,[2,60],{55:[1,313]}),a(Aa,[2,56]),a(Sb,[2,61]),a(wb,[2,57]),a(wb,[2,58]),a(wb,[2,59]),{46:[1,314],78:118,82:ta,83:Na,84:Fa,85:Ga,86:114,87:Ca,90:Pa,109:117,113:Ia,114:xa},a(Nb,ua),{6:qa,42:[1,315]},a(sa,[2,4]),a(Tb,[2,258],{141:77,132:102,138:103,163:fa,164:ia,165:ga}),a(Tb,[2,259],{141:77, 
+132:102,138:103,163:fa,164:ia,165:ga}),a(ab,[2,260],{141:77,132:102,138:103,163:fa,165:ga}),a(ab,[2,261],{141:77,132:102,138:103,163:fa,165:ga}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,166,167,168,169,170,171,172,173,174],[2,262],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,167,168,169,170,171,172,173],[2,263],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,174:da}), +a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,168,169,170,171,172,173],[2,264],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,169,170,171,172,173],[2,265],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,170,171,172,173],[2,266],{141:77,132:102,138:103,159:ma,160:Z, +163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,171,172,173],[2,267],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,172,173],[2,268],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134, 
+135,139,140,156,173],[2,269],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,174:da}),a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,134,135,139,140,156,167,168,169,170,171,172,173,174],[2,270],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja}),a(Xa,[2,248],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Xa,[2,247],{141:77,132:102, +138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(t,[2,160]),a(t,[2,161]),a(La,[2,99]),a(La,[2,100]),a(La,[2,101]),a(La,[2,102]),{89:[1,316]},{73:Lb,89:[2,107],121:317,122:tb,132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{89:[2,108]},{7:318,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15, +24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,89:[2,181],92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Ub,[2,175]),a(Ub,Vb),a(La,[2,106]),a(t,[2,162]),a(sa,[2,64],{141:77,132:102,138:103,133:bb,135:bb,139:bb,156:bb, 
+159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ma,[2,29],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ma,[2,48],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:319,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g, +37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:320,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57, +44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{66:321,67:c,68:w},a(Ra,db,{72:127,33:129,60:130,74:131,75:132,71:322,34:g,73:d,92:m,118:wa,119:e}),{6:Wb,31:Xb},a(Sa,[2,79]),{7:325,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11, 
+20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Qa,Ob,{141:77,132:102,138:103,73:[1,326],133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga, +166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Yb,[2,30]),{6:qa,32:[1,327]},a(Ma,[2,271],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:328,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40, +79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:329,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k, 
+97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Ma,[2,274],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(N,[2,246]),{7:330,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27, +48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(N,[2,193],{127:[1,331]}),{30:332,31:Da},{30:335,31:Da,33:333,34:g,75:334,92:m},{150:336,152:268,153:gb},{32:[1,337],151:[1,338],152:339,153:gb},a(mb,[2,239]),{7:341,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8, +17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,124:340,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Zb,[2,117],{141:77,132:102,138:103,30:342,31:Da,133:D,135:A,139:E,159:ma, 
+160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(N,[2,120]),{7:343,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45, +139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{39:344,40:r,41:n},{92:[1,346],99:345,104:Fb},{39:347,40:r,41:n},{29:[1,348]},a(lb,Ja,{69:349,70:nb}),a(Aa,[2,130]),{31:hb,33:280,34:g,100:351,101:278,103:Wa},a(Aa,[2,135],{102:[1,352]}),a(Aa,[2,137],{102:[1,353]}),{33:354,34:g},a(Ea,[2,141]),a(lb,Ja,{69:355,70:xb}),a(Aa,[2,150]),{31:ub,33:287,34:g,103:cb,106:357,108:285},a(Aa,[2,155],{102:[1,358]}),a(Aa,[2,158],{102:[1,359]}),{6:[1,361],7:360,8:122,10:20,11:21,12:b,13:23,14:24, +15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,362],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(yb,[2,147],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma, 
+160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{39:363,40:r,41:n},a(Ba,[2,200]),{6:qa,32:[1,364]},{7:365,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W, +132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a([12,28,34,38,40,41,44,45,48,49,50,51,52,53,61,62,63,67,68,92,95,97,105,112,117,118,119,125,129,130,133,135,137,139,149,155,157,158,159,160,161,162],Vb,{6:eb,31:eb,70:eb,120:eb}),{6:ob,31:pb,120:[1,366]},a([6,31,32,115,120],db,{15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,10:20,11:21,13:23,14:24,54:26,47:27,79:28,80:29,81:30,111:31,66:33,77:40,154:41,132:43, +136:44,138:45,74:53,75:54,37:55,43:57,33:70,60:71,141:77,39:80,8:122,76:179,7:254,123:369,12:b,28:ea,34:g,38:h,40:r,41:n,44:B,45:H,48:I,49:F,50:Q,51:x,52:J,53:O,61:R,62:z,63:l,67:c,68:w,73:Ua,92:m,95:k,97:K,105:P,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,133:D,135:A,137:q,139:E,149:ba,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}),a(Ra,Ja,{69:370,70:ib}),a(t,[2,168]),a([6,31,115],Ja,{69:371,70:ib}),a($b,[2,243]),{7:372,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14, 
+23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:373,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19, +28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:374,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h, +39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(jb,[2,219]),{33:194,34:g,60:195,74:196,75:197,92:m,118:wa,119:e,145:375},a([1,6,31,32,42,65,70,73,89,94,115,120,122,131,133,135,139,156],[2,226],{141:77,132:102,138:103,134:[1, 
+376],140:[1,377],159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(zb,[2,227],{141:77,132:102,138:103,134:[1,378],159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(zb,[2,233],{141:77,132:102,138:103,134:[1,379],159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{6:ac,31:bc,94:[1,380]},a(Ab,db,{39:80,57:204,59:205,11:206,37:207,33:208,35:209,60:210,56:383, +34:g,36:Oa,38:h,40:r,41:n,62:z,118:wa}),{7:384,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,385],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v, +160:Y,161:S,162:M},{7:386,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:[1,387],33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M}, 
+a(Ba,[2,39]),a(Jb,[2,37]),a(La,[2,105]),{7:388,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,89:[2,179],92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v, +160:Y,161:S,162:M},{89:[2,180],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},a(Ma,[2,49],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{32:[1,389],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{30:390,31:Da},a(Sa,[2,75]),{33:129, +34:g,60:130,71:391,72:127,73:d,74:131,75:132,92:m,118:wa,119:e},a(cc,p,{71:126,72:127,33:129,60:130,74:131,75:132,64:392,34:g,73:d,92:m,118:wa,119:e}),a(Sa,[2,80],{141:77,132:102,138:103,133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Qa,eb),a(Yb,[2,31]),{32:[1,393],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},a(Ma,[2,273], 
+{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{30:394,31:Da,132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{30:395,31:Da},a(N,[2,194]),{30:396,31:Da},{30:397,31:Da},a(Bb,[2,198]),{32:[1,398],151:[1,399],152:339,153:gb},a(N,[2,237]),{30:400,31:Da},a(mb,[2,240]),{30:401,31:Da,70:[1,402]},a(dc,[2,190],{141:77,132:102,138:103,133:D, +135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(N,[2,118]),a(Zb,[2,121],{141:77,132:102,138:103,30:403,31:Da,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ea,[2,124]),{29:[1,404]},{31:hb,33:280,34:g,100:405,101:278,103:Wa},a(Ea,[2,125]),{39:406,40:r,41:n},{6:qb,31:rb,94:[1,407]},a(Ab,db,{33:280,101:410,34:g,103:Wa}),a(Ra,Ja,{69:411,70:nb}),{33:412,34:g}, +{33:413,34:g},{29:[2,140]},{6:Cb,31:Db,94:[1,414]},a(Ab,db,{33:287,108:417,34:g,103:cb}),a(Ra,Ja,{69:418,70:xb}),{33:419,34:g,103:[1,420]},{33:421,34:g},a(yb,[2,144],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:422,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q, 
+51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:423,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R, +62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Ea,[2,148]),{131:[1,424]},{120:[1,425],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},a(vb,[2,174]),{7:254,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9, +18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,123:426,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:254,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11, 
+20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,31:$a,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,73:Ua,74:53,75:54,76:179,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,116:427,117:V,118:X,119:G,123:177,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Qa,[2,183]),{6:ob,31:pb,32:[1,428]},{6:ob,31:pb,115:[1,429]}, +a(Xa,[2,203],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Xa,[2,205],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Xa,[2,216],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(jb,[2,225]),{7:430,8:122,10:20,11:21, +12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:431,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9, 
+18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:432,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14, +23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:433,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19, +28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(vb,[2,109]),{11:206,33:208,34:g,35:209,36:Oa,37:207,38:h,39:80,40:r,41:n,56:434,57:204,59:205,60:210,62:z,118:wa},a(cc,Hb,{39:80,56:203,57:204, 
+59:205,11:206,37:207,33:208,35:209,60:210,93:435,34:g,36:Oa,38:h,40:r,41:n,62:z,118:wa}),a(Aa,[2,112]),a(Aa,[2,52],{141:77,132:102,138:103,133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:436,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l, +66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(Aa,[2,54],{141:77,132:102,138:103,133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{7:437,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18, +27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{89:[2,178],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na, 
+173:ra,174:da},a(N,[2,50]),a(N,[2,67]),a(Sa,[2,76]),a(Ra,Ja,{69:438,70:Mb}),a(N,[2,272]),a($b,[2,244]),a(N,[2,195]),a(Bb,[2,196]),a(Bb,[2,197]),a(N,[2,235]),{30:439,31:Da},{32:[1,440]},a(mb,[2,241],{6:[1,441]}),{7:442,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30, +92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},a(N,[2,122]),{39:443,40:r,41:n},a(lb,Ja,{69:444,70:nb}),a(Ea,[2,126]),{29:[1,445]},{33:280,34:g,101:446,103:Wa},{31:hb,33:280,34:g,100:447,101:278,103:Wa},a(Aa,[2,131]),{6:qb,31:rb,32:[1,448]},a(Aa,[2,136]),a(Aa,[2,138]),a(Ea,[2,142],{29:[1,449]}),{33:287,34:g,103:cb,108:450},{31:ub,33:287,34:g,103:cb,106:451,108:285}, +a(Aa,[2,151]),{6:Cb,31:Db,32:[1,452]},a(Aa,[2,156]),a(Aa,[2,157]),a(Aa,[2,159]),a(yb,[2,145],{141:77,132:102,138:103,133:D,135:A,139:E,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),{32:[1,453],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},a(Ba,[2,201]),a(Ba,[2,177]),a(Qa,[2,184]),a(Ra,Ja,{69:454,70:ib}),a(Qa,[2,185]),a(t,[2,169]),a([1,6,31,32,42, 
+65,70,73,89,94,115,120,122,131,133,134,135,139,156],[2,228],{141:77,132:102,138:103,140:[1,455],159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(zb,[2,230],{141:77,132:102,138:103,134:[1,456],159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ma,[2,229],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ma,[2,234],{141:77,132:102, +138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Aa,[2,113]),a(Ra,Ja,{69:457,70:Rb}),{32:[1,458],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{32:[1,459],132:102,133:D,135:A,138:103,139:E,141:77,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da},{6:Wb,31:Xb,32:[1,460]},{32:[1,461]},a(N, +[2,238]),a(mb,[2,242]),a(dc,[2,191],{141:77,132:102,138:103,133:D,135:A,139:E,156:za,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ea,[2,128]),{6:qb,31:rb,94:[1,462]},{39:463,40:r,41:n},a(Aa,[2,132]),a(Ra,Ja,{69:464,70:nb}),a(Aa,[2,133]),{39:465,40:r,41:n},a(Aa,[2,152]),a(Ra,Ja,{69:466,70:xb}),a(Aa,[2,153]),a(Ea,[2,146]),{6:ob,31:pb,32:[1,467]},{7:468,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16, 
+25:17,26:18,27:19,28:ea,33:70,34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{7:469,8:122,10:20,11:21,12:b,13:23,14:24,15:7,16:8,17:9,18:10,19:11,20:12,21:13,22:14,23:15,24:16,25:17,26:18,27:19,28:ea,33:70, +34:g,37:55,38:h,39:80,40:r,41:n,43:57,44:B,45:H,47:27,48:I,49:F,50:Q,51:x,52:J,53:O,54:26,60:71,61:R,62:z,63:l,66:33,67:c,68:w,74:53,75:54,77:40,79:28,80:29,81:30,92:m,95:k,97:K,105:P,111:31,112:L,117:V,118:X,119:G,125:aa,129:U,130:W,132:43,133:D,135:A,136:44,137:q,138:45,139:E,141:77,149:ba,154:41,155:ca,157:C,158:T,159:v,160:Y,161:S,162:M},{6:ac,31:bc,32:[1,470]},a(Aa,[2,53]),a(Aa,[2,55]),a(Sa,[2,77]),a(N,[2,236]),{29:[1,471]},a(Ea,[2,127]),{6:qb,31:rb,32:[1,472]},a(Ea,[2,149]),{6:Cb,31:Db,32:[1, +473]},a(Qa,[2,186]),a(Ma,[2,231],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Ma,[2,232],{141:77,132:102,138:103,159:ma,160:Z,163:fa,164:ia,165:ga,166:ja,167:la,168:oa,169:pa,170:ha,171:ka,172:na,173:ra,174:da}),a(Aa,[2,114]),{39:474,40:r,41:n},a(Aa,[2,134]),a(Aa,[2,154]),a(Ea,[2,129])],defaultActions:{68:[2,69],69:[2,70],238:[2,108],354:[2,140]},parseError:function(a,d){if(d.recoverable)this.trace(a);else{var e=function(a, +d){this.message=a;this.hash=d};e.prototype=Error;throw new e(a,d);}},parse:function(a){var d=[0],e=[null],b=[],p=this.table,t="",wa=0,c=0,g=0,Da=b.slice.call(arguments,1),k=Object.create(this.lexer),h={};for(f in this.yy)Object.prototype.hasOwnProperty.call(this.yy,f)&&(h[f]=this.yy[f]);k.setInput(a,h);h.lexer=k;h.parser=this;"undefined"==typeof k.yylloc&&(k.yylloc={});var f=k.yylloc;b.push(f);var l=k.options&&k.options.ranges;this.parseError="function"===typeof 
h.parseError?h.parseError:Object.getPrototypeOf(this).parseError; +for(var m,Ta,Ha,n,ua={},y,w;;){Ha=d[d.length-1];if(this.defaultActions[Ha])n=this.defaultActions[Ha];else{if(null===m||"undefined"==typeof m)m=k.lex()||1,"number"!==typeof m&&(m=this.symbols_[m]||m);n=p[Ha]&&p[Ha][m]}if("undefined"===typeof n||!n.length||!n[0]){w=[];for(y in p[Ha])this.terminals_[y]&&2=ta?this.wrapInBraces(d):d};b.prototype.compileRoot=function(a){var d,b;a.indent=a.bare?"":Ca;a.level=N;this.spaced=!0;a.scope=new xa(null,this,null,null!=(b=a.referencedVars)?b:[]);var e=a.locals||[];b=0;for(d=e.length;b=Fa?this.wrapInBraces(d): +d};return b}(w);f.StringLiteral=D=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);return b}(z);f.RegexLiteral=X=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);return b}(z);f.PassthroughLiteral=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);return b}(z);f.IdentifierLiteral=x=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);b.prototype.isAssignable=ha; +return b}(z);f.PropertyName=L=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);b.prototype.isAssignable=ha;return b}(z);f.StatementLiteral=W=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);b.prototype.isStatement=ha;b.prototype.makeReturn=na;b.prototype.jumps=function(a){if("break"===this.value&&!(null!=a&&a.loop||null!=a&&a.block)||"continue"===this.value&&(null==a||!a.loop))return this};b.prototype.compileNode=function(a){return[this.makeCode(""+ +this.tab+this.value+";")]};return b}(z);f.ThisLiteral=E=function(a){function b(){b.__super__.constructor.call(this,"this")}v(b,a);b.prototype.compileNode=function(a){var d;a=null!=(d=a.scope.method)&&d.bound?a.scope.method.context:this.value;return[this.makeCode(a)]};return b}(z);f.UndefinedLiteral=ca=function(a){function 
b(){b.__super__.constructor.call(this,"undefined")}v(b,a);b.prototype.compileNode=function(a){return[this.makeCode(a.level>=Ga?"(void 0)":"void 0")]};return b}(z);f.NullLiteral= +c=function(a){function b(){b.__super__.constructor.call(this,"null")}v(b,a);return b}(z);f.BooleanLiteral=b=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);return b}(z);f.Return=G=function(a){function b(a){this.expression=a}v(b,a);b.prototype.children=["expression"];b.prototype.isStatement=ha;b.prototype.makeReturn=na;b.prototype.jumps=na;b.prototype.compileToFragments=function(a,d){var p;var e=null!=(p=this.expression)?p.makeReturn():void 0;return!e||e instanceof +b?b.__super__.compileToFragments.call(this,a,d):e.compileToFragments(a,d)};b.prototype.compileNode=function(a){var b=[];b.push(this.makeCode(this.tab+("return"+(this.expression?" ":""))));this.expression&&(b=b.concat(this.expression.compileToFragments(a,Ka)));b.push(this.makeCode(";"));return b};return b}(sa);f.YieldReturn=T=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);b.prototype.compileNode=function(a){null==a.scope.parent&&this.error("yield can only occur inside functions"); +return b.__super__.compileNode.apply(this,arguments)};return b}(G);f.Value=C=function(a){function t(a,b,wa){if(!b&&a instanceof t)return a;this.base=a;this.properties=b||[];wa&&(this[wa]=!0);return this}v(t,a);t.prototype.children=["base","properties"];t.prototype.add=function(a){this.properties=this.properties.concat(a);return this};t.prototype.hasProperties=function(){return!!this.properties.length};t.prototype.bareLiteral=function(a){return!this.properties.length&&this.base instanceof a};t.prototype.isArray= +function(){return this.bareLiteral(q)};t.prototype.isRange=function(){return this.bareLiteral(V)};t.prototype.isComplex=function(){return this.hasProperties()||this.base.isComplex()};t.prototype.isAssignable=function(){return 
this.hasProperties()||this.base.isAssignable()};t.prototype.isNumber=function(){return this.bareLiteral(w)};t.prototype.isString=function(){return this.bareLiteral(D)};t.prototype.isRegex=function(){return this.bareLiteral(X)};t.prototype.isUndefined=function(){return this.bareLiteral(ca)}; +t.prototype.isNull=function(){return this.bareLiteral(c)};t.prototype.isBoolean=function(){return this.bareLiteral(b)};t.prototype.isAtomic=function(){var a;var b=this.properties.concat(this.base);var wa=0;for(a=b.length;wathis.properties.length&&!this.base.isComplex()&&(null==p||!p.isComplex()))return[this,this];b=new t(this.base,this.properties.slice(0,-1));if(b.isComplex()){var e=new x(a.scope.freeVariable("base"));b=new t(new P(new y(e, +b)))}if(!p)return[b,e];if(p.isComplex()){var c=new x(a.scope.freeVariable("name"));p=new R(new y(c,p.index));c=new R(c)}return[b.add(p),new t(e||b.base,[c||p])]};t.prototype.compileNode=function(a){var b;this.base.front=this.front;var p=this.properties;var e=this.base.compileToFragments(a,p.length?Ga:null);p.length&&Pa.test(da(e))&&e.push(this.makeCode("."));var t=0;for(b=p.length;t=Math.abs(this.fromNum-this.toNum)){var c=function(){e=[];for(var a=p=this.fromNum,b=this.toNum;p<=b?a<=b:a>=b;p<=b?a++:a--)e.push(a);return e}.apply(this);this.exclusive&&c.pop();return[this.makeCode("["+c.join(", ")+"]")]}var t=this.tab+Ca;var f=a.scope.freeVariable("i",{single:!0});var g=a.scope.freeVariable("results");var k="\n"+t+g+" \x3d [];";if(b)a.index=f,b=da(this.compileNode(a));else{var h= +f+" \x3d "+this.fromC+(this.toC!==this.toVar?", "+this.toC:"");b=this.fromVar+" \x3c\x3d "+this.toVar;b="var "+h+"; "+b+" ? "+f+" \x3c"+this.equals+" "+this.toVar+" : "+f+" \x3e"+this.equals+" "+this.toVar+"; "+b+" ? 
"+f+"++ : "+f+"--"}f="{ "+g+".push("+f+"); }\n"+t+"return "+g+";\n"+a.indent;a=function(a){return null!=a?a.contains(Va):void 0};if(a(this.from)||a(this.to))c=", arguments";return[this.makeCode("(function() {"+k+"\n"+t+"for ("+b+")"+f+"}).apply(this"+(null!=c?c:"")+")")]};return b}(sa); +f.Slice=aa=function(a){function b(a){this.range=a;b.__super__.constructor.call(this)}v(b,a);b.prototype.children=["range"];b.prototype.compileNode=function(a){var b=this.range;var p=b.to;var e=(b=b.from)&&b.compileToFragments(a,Ka)||[this.makeCode("0")];if(p){b=p.compileToFragments(a,Ka);var c=da(b);if(this.range.exclusive||-1!==+c)var t=", "+(this.range.exclusive?c:p.isNumber()?""+(+c+1):(b=p.compileToFragments(a,Ga),"+"+da(b)+" + 1 || 9e9"))}return[this.makeCode(".slice("+da(e)+(t||"")+")")]};return b}(sa); +f.Obj=m=function(a){function b(a,b){this.generated=null!=b?b:!1;this.objects=this.properties=a||[]}v(b,a);b.prototype.children=["properties"];b.prototype.compileNode=function(a){var b,p,e;var c=this.properties;if(this.generated){var t=0;for(b=c.length;t= +Fa?this.wrapInBraces(t):t}var h=g[0];1===e&&h instanceof H&&h.error("Destructuring assignment has no target");var m=this.variable.isObject();if(p&&1===e&&!(h instanceof U)){var l=null;if(h instanceof b&&"object"===h.context){t=h;var n=t.variable;var q=n.base;h=t.value;h instanceof b&&(l=h.value,h=h.variable)}else h instanceof b&&(l=h.value,h=h.variable),q=m?h["this"]?h.properties[0].name:new L(h.unwrap().value):new w(0);var r=q.unwrap()instanceof L;f=new C(f);f.properties.push(new (r?qa:R)(q));(c= +za(h.unwrap().value))&&h.error(c);l&&(f=new k("?",f,l));return(new b(h,f,null,{param:this.param})).compileToFragments(a,N)}var v=f.compileToFragments(a,ta);var y=da(v);t=[];n=!1;f.unwrap()instanceof x&&!this.variable.assigns(y)||(t.push([this.makeCode((l=a.scope.freeVariable("ref"))+" \x3d ")].concat(M.call(v))),v=[this.makeCode(l)],y=l);l=f=0;for(d=g.length;fN?this.wrapInBraces(e):e};return b}(sa);f.Code=h=function(b){function 
c(b,d,c){this.params=b||[];this.body=d||new a;this.bound="boundfunc"===c;this.isGenerator=!!this.body.contains(function(a){return a instanceof k&&a.isYield()|| +a instanceof T})}v(c,b);c.prototype.children=["params","body"];c.prototype.isStatement=function(){return!!this.ctor};c.prototype.jumps=ka;c.prototype.makeScope=function(a){return new xa(a,this.body,this)};c.prototype.compileNode=function(b){var d,f,e,g;this.bound&&null!=(d=b.scope.method)&&d.bound&&(this.context=b.scope.method.context);if(this.bound&&!this.context)return this.context="_this",d=new c([new K(new x(this.context))],new a([this])),d=new ya(d,[new E]),d.updateLocationDataIfMissing(this.locationData), +d.compileNode(b);b.scope=la(b,"classScope")||this.makeScope(b.scope);b.scope.shared=la(b,"sharedScope");b.indent+=Ca;delete b.bare;delete b.isExistentialEquals;d=[];var p=[];var h=this.params;var t=0;for(e=h.length;t=Ga?this.wrapInBraces(p):p};c.prototype.eachParamName=function(a){var b;var c=this.params;var e=[];var f=0;for(b=c.length;f=d.length)return[];if(1===d.length)return e=d[0],d=e.compileToFragments(a,ta),c?d:[].concat(e.makeCode(Ia("slice",a)+".call("),d,e.makeCode(")"));c=d.slice(f);var h=g=0;for(p=c.length;g< +p;h=++g){e=c[h];var k=e.compileToFragments(a,ta);c[h]=e instanceof b?[].concat(e.makeCode(Ia("slice",a)+".call("),k,e.makeCode(")")):[].concat(e.makeCode("["),k,e.makeCode("]"))}if(0===f)return e=d[0],a=e.joinFragmentArrays(c.slice(1),", "),c[0].concat(e.makeCode(".concat("),a,e.makeCode(")"));g=d.slice(0,f);p=[];k=0;for(h=g.length;k=Ga)return(new P(this)).compileToFragments(a);var f="+"===c||"-"===c;("new"===c||"typeof"===c||"delete"===c||f&&this.first instanceof b&&this.first.operator===c)&&d.push([this.makeCode(" ")]);if(f&&this.first instanceof b||"new"===c&&this.first.isStatement(a))this.first=new P(this.first);d.push(this.first.compileToFragments(a,Fa));this.flip&&d.reverse();return this.joinFragmentArrays(d,"")};b.prototype.compileYield=function(a){var b; +var d=[];var 
c=this.operator;null==a.scope.parent&&this.error("yield can only occur inside functions");0<=S.call(Object.keys(this.first),"expression")&&!(this.first instanceof ba)?null!=this.first.expression&&d.push(this.first.expression.compileToFragments(a,Fa)):(a.level>=Ka&&d.push([this.makeCode("(")]),d.push([this.makeCode(c)]),""!==(null!=(b=this.first.base)?b.value:void 0)&&d.push([this.makeCode(" ")]),d.push(this.first.compileToFragments(a,Fa)),a.level>=Ka&&d.push([this.makeCode(")")]));return this.joinFragmentArrays(d, +"")};b.prototype.compilePower=function(a){var b=new C(new x("Math"),[new qa(new L("pow"))]);return(new ya(b,[this.first,this.second])).compileToFragments(a)};b.prototype.compileFloorDivision=function(a){var d=new C(new x("Math"),[new qa(new L("floor"))]);var c=this.second.isComplex()?new P(this.second):this.second;c=new b("/",this.first,c);return(new ya(d,[c])).compileToFragments(a)};b.prototype.compileModulo=function(a){var b=new C(new z(Ia("modulo",a)));return(new ya(b,[this.first,this.second])).compileToFragments(a)}; +b.prototype.toString=function(a){return b.__super__.toString.call(this,a,this.constructor.name+" "+this.operator)};return b}(sa);f.In=O=function(a){function b(a,b){this.object=a;this.array=b}v(b,a);b.prototype.children=["object","array"];b.prototype.invert=ra;b.prototype.compileNode=function(a){var b;if(this.array instanceof C&&this.array.isArray()&&this.array.base.objects.length){var c=this.array.base.objects;var e=0;for(b=c.length;e=c.length)?c:this.wrapInBraces(c)};return b}(sa); +f.StringWithInterpolations=A=function(a){function b(){return b.__super__.constructor.apply(this,arguments)}v(b,a);b.prototype.compileNode=function(a){var d;if(!a.inTaggedTemplateCall)return b.__super__.compileNode.apply(this,arguments);var c=this.body.unwrap();var e=[];c.traverseChildren(!1,function(a){if(a instanceof D)e.push(a);else if(a instanceof P)return e.push(a),!1;return!0});c=[];c.push(this.makeCode("`"));var 
f=0;for(d=e.length;fh,this.step&&null!=h&&e||(d=n.freeVariable("len")),K=""+t+f+" \x3d 0, "+d+" \x3d "+A+".length",w=""+t+f+" \x3d "+A+".length - 1",d=f+" \x3c "+d,n=f+" \x3e\x3d 0",this.step?(null!=h?e&&(d= +n,K=w):(d=r+" \x3e 0 ? "+d+" : "+n,K="("+r+" \x3e 0 ? ("+K+") : "+w+")"),f=f+" +\x3d "+r):f=""+(q!==f?"++"+f:f+"++"),K=[this.makeCode(K+"; "+d+"; "+t+f)])}if(this.returns){var B=""+this.tab+c+" \x3d [];\n";var V="\n"+this.tab+"return "+c+";";l.makeReturn(c)}this.guard&&(1=Na?this.wrapInBraces(e):e};c.prototype.unfoldSoak=function(){return this.soak&&this};return c}(sa);var gc={extend:function(a){return"function(child, parent) { for (var key in parent) { if ("+Ia("hasProp",a)+".call(parent, key)) child[key] \x3d parent[key]; } function ctor() { this.constructor \x3d child; } ctor.prototype \x3d parent.prototype; child.prototype \x3d new ctor(); child.__super__ \x3d parent.prototype; return child; }"},bind:function(){return"function(fn, me){ return function(){ return fn.apply(me, arguments); }; }"}, +indexOf:function(){return"[].indexOf || function(item) { for (var i \x3d 0, l \x3d this.length; i \x3c l; i++) { if (i in this \x26\x26 this[i] \x3d\x3d\x3d item) return i; } return -1; }"},modulo:function(){return"function(a, b) { return (+a % (b \x3d +b) + b) % b; }"},hasProp:function(){return"{}.hasOwnProperty"},slice:function(){return"[].slice"}};var N=1;var Ka=2;var ta=3;var Na=4;var Fa=5;var Ga=6;var Ca=" ";var Pa=/^[+-]?\d+$/;var Ia=function(a,b){var c=b.scope.root;if(a in c.utilities)return c.utilities[a]; +var d=c.freeVariable(a);c.assign(d,gc[a](b));return c.utilities[a]=d};var Ea=function(a,b){a=a.replace(/\n/g,"$\x26"+b);return a.replace(/\s+$/,"")};var Va=function(a){return a instanceof x&&"arguments"===a.value};var ea=function(a){return a instanceof E||a instanceof h&&a.bound||a instanceof va};var Ya=function(a){return a.isComplex()||("function"===typeof a.isAssignable?a.isAssignable():void 0)};var Ba=function(a,b,c){if(a=b[c].unfoldSoak(a))return 
b[c]=a.body,a.body=new C(b),a}}).call(this);return f}(); +u["./sourcemap"]=function(){var f={};(function(){var u=function(){function f(f){this.line=f;this.columns=[]}f.prototype.add=function(f,a,b){var q=a[0];a=a[1];null==b&&(b={});if(!this.columns[f]||!b.noReplace)return this.columns[f]={line:this.line,column:f,sourceLine:q,sourceColumn:a}};f.prototype.sourceLocation=function(f){for(var a;!((a=this.columns[f])||0>=f);)f--;return a&&[a.sourceLine,a.sourceColumn]};return f}();f=function(){function f(){this.lines=[]}f.prototype.add=function(f,a,b){var q;null== +b&&(b={});var g=a[0];a=a[1];return((q=this.lines)[g]||(q[g]=new u(g))).add(a,f,b)};f.prototype.sourceLocation=function(f){var a;var b=f[0];for(f=f[1];!((a=this.lines[b])||0>=b);)b--;return a&&a.sourceLocation(f)};f.prototype.generate=function(f,a){var b,q,g,h,r,n,u;null==f&&(f={});null==a&&(a=null);var y=g=q=u=0;var I=!1;var F="";var Q=this.lines;var x=b=0;for(h=Q.length;bf?1:0);a||!b;)f=a&31,(a>>=5)&&(f|=32),b+=this.encodeBase64(f);return b};f.prototype.encodeBase64=function(f){var a;if(!(a= +"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"[f]))throw Error("Cannot Base64 encode value: "+f);return a};return f}()}).call(this);return f}();u["./coffee-script"]=function(){var f={};(function(){var qa,q,y={}.hasOwnProperty;var a=u("fs");var b=u("vm");var ya=u("path");var g=u("./lexer").Lexer;var h=u("./parser").parser;var r=u("./helpers");var n=u("./sourcemap");var B=u("../../package.json");f.VERSION=B.version;f.FILE_EXTENSIONS=[".coffee",".litcoffee",".coffee.md"];f.helpers= +r;var H=function(a){switch(!1){case "function"!==typeof Buffer:return(new Buffer(a)).toString("base64");case "function"!==typeof btoa:return btoa(encodeURIComponent(a).replace(/%([0-9A-F]{2})/g,function(a,b){return String.fromCharCode("0x"+b)}));default:throw Error("Unable to base64 encode inline sourcemap.");}};B=function(a){return function(b,f){null==f&&(f={});try{return a.call(this,b,f)}catch(m){if("string"!==typeof 
b)throw m;throw r.updateSyntaxError(m,b,f.filename);}}};var I={};var F={};f.compile= +qa=B(function(a,b){var c,f,g,l;var q=r.extend;b=q({},b);var u=b.sourceMap||b.inlineMap||null==b.filename;q=b.filename||"\x3canonymous\x3e";I[q]=a;u&&(g=new n);var x=O.tokenize(a,b);var y=b;var G=[];var z=0;for(c=x.length;z